diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ff5126c1..8cb43804 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,16 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index a3b6ca04..447f9a1e 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -10,7 +10,6 @@ branchProtectionRules: - 'Kokoro' - 'cla/google' - 'Samples - Lint' - - 'Samples - Python 3.6' - 'Samples - Python 3.7' - 'Samples - Python 3.8' - 'Samples - Python 3.9' diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..f7b8344c --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..1e8b05c3 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 00000000..32b0f005 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - 
name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: Download coverage results + uses: actions/download-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-results/ + - name: Report coverage results + run: | + coverage combine .coverage-results/.coverage* + coverage report --show-missing --fail-under=98 diff --git a/.kokoro/release.sh b/.kokoro/release.sh index c1b5619e..263ff191 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-bigquery-storage python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 401827a3..740a22bc 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-bigquery-storage/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 6e239143..4357d60b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,26 @@ [1]: https://pypi.org/project/google-cloud-bigquery-storage/#history +## [2.12.0](https://github.com/googleapis/python-bigquery-storage/compare/v2.11.0...v2.12.0) (2022-02-22) + + +### Features + +* add api key support ([#387](https://github.com/googleapis/python-bigquery-storage/issues/387)) ([5e7b502](https://github.com/googleapis/python-bigquery-storage/commit/5e7b5023ab9d9bcfa7661a19a79f05c07cded77e)) +* add trace_id for Read API ([#396](https://github.com/googleapis/python-bigquery-storage/issues/396)) ([5d7f918](https://github.com/googleapis/python-bigquery-storage/commit/5d7f9188a9ae3db7b3cfb6cc6789dcb071723166)) + + +### Bug Fixes + +* remove bigquery.readonly auth scope ([#394](https://github.com/googleapis/python-bigquery-storage/issues/394)) ([e08d2fd](https://github.com/googleapis/python-bigquery-storage/commit/e08d2fd146153709ce09af751c9437b2365313f0)) +* remove libcst as a required dependency ([#389](https://github.com/googleapis/python-bigquery-storage/issues/389)) ([92b503a](https://github.com/googleapis/python-bigquery-storage/commit/92b503a4ec17f8fc8dabfc24b58ac58fe10eb57f)) +* resolve DuplicateCredentialArgs 
error when using credentials_file ([16520e3](https://github.com/googleapis/python-bigquery-storage/commit/16520e3c3386c412bdaf545994264d66ee641588)) + + +### Documentation + +* add generated snippets ([e08d2fd](https://github.com/googleapis/python-bigquery-storage/commit/e08d2fd146153709ce09af751c9437b2365313f0)) + ## [2.11.0](https://github.com/googleapis/python-bigquery-storage/compare/v2.10.1...v2.11.0) (2022-01-12) diff --git a/UPGRADING.md b/UPGRADING.md index cab98087..ef35b962 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -132,11 +132,12 @@ wrapper around the auto-generated `read_rows()` method. python3 -m pip install google-cloud-bigquery-storage ``` -* The script `fixup_storage_v1_keywords.py` is shipped with the library. It expects -an input directory (with the code to convert) and an empty destination directory. +* The script `fixup_bigquery_storage_v1_keywords.py` is shipped with the library. It +requires `libcst` to be installed. It expects an input directory (with the code +to convert) and an empty destination directory. ```sh -$ scripts/fixup_storage_v1_keywords.py --input-directory .samples/ --output-directory samples/ +$ fixup_bigquery_storage_v1_keywords.py --input-directory .samples/ --output-directory samples/ ``` **Before:** diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py index 52c9642d..2d5cbaf0 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py @@ -16,7 +16,16 @@ from collections import OrderedDict import functools import re -from typing import Dict, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Optional, + AsyncIterable, + Awaitable, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core.client_options import ClientOptions @@ -111,6 +120,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use.
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BigQueryReadClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> BigQueryReadTransport: """Returns the transport used by the client instance. @@ -186,8 +231,8 @@ async def create_read_session( the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the - data to be read, such as a list of columns or a push- - down filter describing the rows to be returned. + data to be read, such as a list of columns or a + push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the @@ -206,6 +251,26 @@ async def create_read_session( are created and do not require manual clean-up by the caller. + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateReadSessionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_read_session(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest, dict]): The request object. Request message for @@ -226,9 +291,9 @@ async def create_read_session( Max initial number of streams. If unset or zero, the server will provide a value of streams so as to produce - reasonable throughput. Must be non- - negative. The number of streams may be - lower than the requested number, + reasonable throughput. Must be + non-negative. The number of streams may + be lower than the requested number, depending on the amount parallelism that is reasonable for the table. Error will be returned if the max count is greater @@ -252,7 +317,7 @@ async def create_read_session( Information about the ReadSession. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, read_session, max_stream_count]) if request is not None and has_flattened_params: @@ -323,6 +388,27 @@ def read_rows( Each request also returns a set of stream statistics reflecting the current state of the stream. + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_read_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.ReadRowsRequest( + read_stream="read_stream_value", + ) + + # Make the request + stream = client.read_rows(request=request) + + # Handle the response + for response in stream: + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.ReadRowsRequest, dict]): The request object. Request message for `ReadRows`. @@ -354,7 +440,7 @@ def read_rows( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([read_stream, offset]) if request is not None and has_flattened_params: @@ -425,6 +511,26 @@ async def split_read_stream( original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion. + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.SplitReadStreamRequest( + name="name_value", + ) + + # Make the request + response = client.split_read_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest, dict]): The request object. Request message for diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py index 90067988..a99caee1 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py @@ -271,6 +271,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use.
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -321,57 +388,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, BigQueryReadTransport): # transport is a BigQueryReadTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -383,6 +415,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, @@ -410,8 +451,8 @@ def create_read_session( the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. 
The read session also specifies properties of the - data to be read, such as a list of columns or a push- - down filter describing the rows to be returned. + data to be read, such as a list of columns or a + push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the @@ -430,6 +471,27 @@ def create_read_session( are created and do not require manual clean-up by the caller. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateReadSessionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_read_session(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest, dict]): The request object. Request message for @@ -450,9 +512,9 @@ def create_read_session( Max initial number of streams. If unset or zero, the server will provide a value of streams so as to produce - reasonable throughput. Must be non- - negative. The number of streams may be - lower than the requested number, + reasonable throughput. Must be + non-negative. The number of streams may + be lower than the requested number, depending on the amount parallelism that is reasonable for the table. Error will be returned if the max count is greater @@ -476,7 +538,7 @@ def create_read_session( Information about the ReadSession. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, read_session, max_stream_count]) if request is not None and has_flattened_params: @@ -537,6 +599,28 @@ def read_rows( Each request also returns a set of stream statistics reflecting the current state of the stream. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_read_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.ReadRowsRequest( + read_stream="read_stream_value", + ) + + # Make the request + stream = client.read_rows(request=request) + + # Handle the response + for response in stream: + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.ReadRowsRequest, dict]): The request object. Request message for `ReadRows`. @@ -568,7 +652,7 @@ def read_rows( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([read_stream, offset]) if request is not None and has_flattened_params: @@ -630,6 +714,27 @@ def split_read_stream( original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion. + + + .. 
code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.SplitReadStreamRequest( + name="name_value", + ) + + # Make the request + response = client.split_read_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest, dict]): The request object. Request message for diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py index 40e60cea..68eb4859 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py @@ -43,7 +43,6 @@ class BigQueryReadTransport(abc.ABC): AUTH_SCOPES = ( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ) diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py index 8ce1da7c..4c3505ef 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py @@ -160,8 +160,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -237,8 +240,8 @@ def create_read_session( the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the - data to be read, such as a list of columns or a push- - down filter describing the rows to be returned. + data to be read, such as a list of columns or a + push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py index 05f54281..6f449391 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py @@ -205,8 +205,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -239,8 +242,8 @@ def create_read_session( the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. 
The read session also specifies properties of the - data to be read, such as a list of columns or a push- - down filter describing the rows to be returned. + data to be read, such as a list of columns or a + push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py b/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py index 3aa61949..c108f84b 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Optional, AsyncIterable, Awaitable, AsyncIterator, @@ -124,6 +125,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BigQueryWriteClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property + def transport(self) -> BigQueryWriteTransport: """Returns the transport used by the client instance. @@ -202,6 +239,26 @@ async def create_write_stream( stream is considered committed as soon as an acknowledgement is received. + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateWriteStreamRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest, dict]): The request object. Request message for @@ -233,7 +290,7 @@ async def create_write_stream( """ # Create or coerce a protobuf request object.
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, write_stream]) if request is not None and has_flattened_params: @@ -324,6 +381,37 @@ def append_rows( rpc), and the stream is explicitly committed via the ``BatchCommitWriteStreams`` rpc. + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_append_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.AppendRowsRequest( + write_stream="write_stream_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.append_rows(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + Args: requests (AsyncIterator[`google.cloud.bigquery_storage_v1.types.AppendRowsRequest`]): The request object AsyncIterator. Request message for `AppendRows`. @@ -380,6 +468,25 @@ async def get_write_stream( ) -> stream.WriteStream: r"""Gets information about a write stream. + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.GetWriteStreamRequest( + name="name_value", + ) + + # Make the request + response = client.get_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest, dict]): The request object. Request message for @@ -405,7 +512,7 @@ async def get_write_stream( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -463,6 +570,26 @@ async def finalize_write_stream( r"""Finalize a write stream so that no new data can be appended to the stream. Finalize is not supported on the '_default' stream. + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FinalizeWriteStreamRequest( + name="name_value", + ) + + # Make the request + response = client.finalize_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest, dict]): The request object. Request message for invoking @@ -485,7 +612,7 @@ async def finalize_write_stream( Response message for FinalizeWriteStream. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -547,6 +674,27 @@ async def batch_commit_write_streams( multiple times. Once a stream is committed, data in the stream becomes available for read operations. + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=['write_streams_value_1', 'write_streams_value_2'], + ) + + # Make the request + response = client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest, dict]): The request object. Request message for @@ -570,7 +718,7 @@ Response message for BatchCommitWriteStreams. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -636,6 +784,26 @@ async def flush_rows( Flush is not supported on the \_default stream, since it is not BUFFERED. + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FlushRowsRequest( + write_stream="write_stream_value", + ) + + # Make the request + response = client.flush_rows(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.FlushRowsRequest, dict]): The request object. Request message for `FlushRows`. @@ -657,7 +825,7 @@ Respond message for FlushRows. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([write_stream]) if request is not None and has_flattened_params: diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/client.py b/google/cloud/bigquery_storage_v1/services/big_query_write/client.py index f19762b2..5b0a4360 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/client.py @@ -255,6 +255,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None.
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -305,57 +372,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use.
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, BigQueryWriteTransport): # transport is a BigQueryWriteTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -367,6 +399,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, @@ -397,6 +438,27 @@ def create_write_stream( stream is considered committed as soon as an acknowledgement is received. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateWriteStreamRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest, dict]): The request object. Request message for @@ -428,7 +490,7 @@ def create_write_stream( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, write_stream]) if request is not None and has_flattened_params: @@ -509,6 +571,38 @@ def append_rows( rpc), and the stream is explicitly committed via the ``BatchCommitWriteStreams`` rpc. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_append_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.AppendRowsRequest( + write_stream="write_stream_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.append_rows(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + Args: requests (Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]): The request object iterator. Request message for `AppendRows`. @@ -552,6 +646,26 @@ def get_write_stream( ) -> stream.WriteStream: r"""Gets information about a write stream. + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.GetWriteStreamRequest( + name="name_value", + ) + + # Make the request + response = client.get_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest, dict]): The request object. Request message for @@ -577,7 +691,7 @@ def get_write_stream( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -625,6 +739,27 @@ def finalize_write_stream( r"""Finalize a write stream so that no new data can be appended to the stream. Finalize is not supported on the '_default' stream. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FinalizeWriteStreamRequest( + name="name_value", + ) + + # Make the request + response = client.finalize_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest, dict]): The request object. Request message for invoking @@ -647,7 +782,7 @@ def finalize_write_stream( Response message for FinalizeWriteStream. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -699,6 +834,28 @@ def batch_commit_write_streams( multiple times. Once a stream is committed, data in the stream becomes available for read operations. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=['write_streams_value_1', 'write_streams_value_2'], + ) + + # Make the request + response = client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest, dict]): The request object. Request message for @@ -722,7 +879,7 @@ def batch_commit_write_streams( Response message for BatchCommitWriteStreams. """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -780,6 +937,27 @@ def flush_rows( Flush is not supported on the \_default stream, since it is not BUFFERED. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1 + + def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FlushRowsRequest( + write_stream="write_stream_value", + ) + + # Make the request + response = client.flush_rows(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1.types.FlushRowsRequest, dict]): The request object. Request message for `FlushRows`. @@ -801,7 +979,7 @@ def flush_rows( Respond message for FlushRows. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([write_stream]) if request is not None and has_flattened_params: diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc.py b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc.py index 68a1a685..3f8d0b51 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc.py @@ -162,8 +162,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py index facec1eb..bfbaceb6 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py @@ -207,8 +207,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/bigquery_storage_v1/types/arrow.py b/google/cloud/bigquery_storage_v1/types/arrow.py index 99c77b3e..862e915b 100644 --- a/google/cloud/bigquery_storage_v1/types/arrow.py +++ b/google/cloud/bigquery_storage_v1/types/arrow.py @@ -26,8 +26,7 @@ class ArrowSchema(proto.Message): r"""Arrow schema as specified in https://arrow.apache.org/docs/python/api/datatypes.html and serialized to bytes using IPC: - https://arrow.apache.org/docs/format/Columnar.html#serialization- - and-interprocess-communication-ipc + https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc See code samples on how this message can be deserialized. Attributes: diff --git a/google/cloud/bigquery_storage_v1/types/protobuf.py b/google/cloud/bigquery_storage_v1/types/protobuf.py index 4e73b375..9229d61b 100644 --- a/google/cloud/bigquery_storage_v1/types/protobuf.py +++ b/google/cloud/bigquery_storage_v1/types/protobuf.py @@ -54,9 +54,10 @@ class ProtoRows(proto.Message): serialized_rows (Sequence[bytes]): A sequence of rows serialized as a Protocol Buffer. - See https://developers.google.com/protocol- - buffers/docs/overview for more information on - deserializing this field. + See + https://developers.google.com/protocol-buffers/docs/overview + for more information on deserializing this + field. """ serialized_rows = proto.RepeatedField(proto.BYTES, number=1,) diff --git a/google/cloud/bigquery_storage_v1/types/stream.py b/google/cloud/bigquery_storage_v1/types/stream.py index aa527022..c9c31218 100644 --- a/google/cloud/bigquery_storage_v1/types/stream.py +++ b/google/cloud/bigquery_storage_v1/types/stream.py @@ -87,6 +87,15 @@ class ReadSession(proto.Message): are completely consumed. This estimate is based on metadata from the table which might be incomplete or stale. + trace_id (str): + Optional. ID set by client to annotate a + session identity. This does not need to be + strictly unique, but instead the same ID should + be used to group logically connected sessions + (e.g. All using the same ID for all sessions + needed to complete a Spark SQL query is + reasonable). + Maximum length is 256 bytes. 
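The new field in practice: a minimal sketch of a read session that carries a shared trace_id. The project and table names are hypothetical placeholders; only the `trace_id` field itself comes from this patch.

```python
from google.cloud import bigquery_storage_v1

client = bigquery_storage_v1.BigQueryReadClient()

# Reuse one trace_id for every session that belongs to the same logical
# job, as the field documentation above suggests.
requested_session = bigquery_storage_v1.types.ReadSession(
    table="projects/my-project/datasets/my_dataset/tables/my_table",  # placeholder
    data_format=bigquery_storage_v1.types.DataFormat.ARROW,
    trace_id="spark-sql-job-1234",  # placeholder ID shared across sessions
)

session = client.create_read_session(
    parent="projects/my-project",  # placeholder
    read_session=requested_session,
    max_stream_count=1,
)
```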
""" class TableModifiers(proto.Message): @@ -154,6 +163,7 @@ class TableReadOptions(proto.Message): read_options = proto.Field(proto.MESSAGE, number=8, message=TableReadOptions,) streams = proto.RepeatedField(proto.MESSAGE, number=10, message="ReadStream",) estimated_total_bytes_scanned = proto.Field(proto.INT64, number=12,) + trace_id = proto.Field(proto.STRING, number=13,) class ReadStream(proto.Message): diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py index 7503dc70..8e0398c6 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py @@ -16,7 +16,16 @@ from collections import OrderedDict import functools import re -from typing import Dict, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Optional, + AsyncIterable, + Awaitable, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core.client_options import ClientOptions @@ -113,6 +122,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BigQueryReadClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> BigQueryReadTransport: """Returns the transport used by the client instance. @@ -188,8 +233,8 @@ async def create_read_session( the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the - data to be read, such as a list of columns or a push- - down filter describing the rows to be returned. + data to be read, such as a list of columns or a + push-down filter describing the rows to be returned. A particular row can be read by at most one stream. 
When the caller has reached the end of each stream in the @@ -208,6 +253,26 @@ async def create_read_session( are created and do not require manual clean-up by the caller. + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateReadSessionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_read_session(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest, dict]): The request object. Request message for @@ -228,9 +293,9 @@ async def create_read_session( Max initial number of streams. If unset or zero, the server will provide a value of streams so as to produce - reasonable throughput. Must be non- - negative. The number of streams may be - lower than the requested number, + reasonable throughput. Must be + non-negative. The number of streams may + be lower than the requested number, depending on the amount parallelism that is reasonable for the table. Error will be returned if the max count is greater @@ -254,7 +319,7 @@ async def create_read_session( Information about the ReadSession. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, read_session, max_stream_count]) if request is not None and has_flattened_params: @@ -325,6 +390,27 @@ def read_rows( Each request also returns a set of stream statistics reflecting the current state of the stream. + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_read_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.ReadRowsRequest( + read_stream="read_stream_value", + ) + + # Make the request + stream = client.read_rows(request=request) + + # Handle the response + for response in stream: + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest, dict]): The request object. Request message for `ReadRows`. @@ -356,7 +442,7 @@ def read_rows( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([read_stream, offset]) if request is not None and has_flattened_params: @@ -427,6 +513,26 @@ async def split_read_stream( original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion. + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.SplitReadStreamRequest( + name="name_value", + ) + + # Make the request + response = client.split_read_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest, dict]): The request object. 
Request message for diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py index 5e79893f..20a3584e 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py @@ -273,6 +273,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -323,57 +390,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed.
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, BigQueryReadTransport): # transport is a BigQueryReadTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -385,6 +417,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, @@ -412,8 +453,8 @@ def create_read_session( the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the - data to be read, such as a list of columns or a push- - down filter describing the rows to be returned. + data to be read, such as a list of columns or a + push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the @@ -432,6 +473,27 @@ def create_read_session( are created and do not require manual clean-up by the caller. + + + .. 
code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateReadSessionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_read_session(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest, dict]): The request object. Request message for @@ -452,9 +514,9 @@ def create_read_session( Max initial number of streams. If unset or zero, the server will provide a value of streams so as to produce - reasonable throughput. Must be non- - negative. The number of streams may be - lower than the requested number, + reasonable throughput. Must be + non-negative. The number of streams may + be lower than the requested number, depending on the amount parallelism that is reasonable for the table. Error will be returned if the max count is greater @@ -478,7 +540,7 @@ def create_read_session( Information about the ReadSession. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, read_session, max_stream_count]) if request is not None and has_flattened_params: @@ -539,6 +601,28 @@ def read_rows( Each request also returns a set of stream statistics reflecting the current state of the stream. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_read_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.ReadRowsRequest( + read_stream="read_stream_value", + ) + + # Make the request + stream = client.read_rows(request=request) + + # Handle the response + for response in stream: + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest, dict]): The request object. Request message for `ReadRows`. @@ -570,7 +654,7 @@ def read_rows( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([read_stream, offset]) if request is not None and has_flattened_params: @@ -632,6 +716,27 @@ def split_read_stream( original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.SplitReadStreamRequest( + name="name_value", + ) + + # Make the request + response = client.split_read_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest, dict]): The request object. 
Request message for diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py index deac9b75..817222f2 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py @@ -43,7 +43,6 @@ class BigQueryReadTransport(abc.ABC): AUTH_SCOPES = ( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ) diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc.py index 8bafe6c9..0519e51e 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc.py @@ -162,8 +162,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -239,8 +242,8 @@ def create_read_session( the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the - data to be read, such as a list of columns or a push- - down filter describing the rows to be returned. + data to be read, such as a list of columns or a + push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py index b77a0036..d13ddf1a 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py @@ -207,8 +207,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -241,8 +244,8 @@ def create_read_session( the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the - data to be read, such as a list of columns or a push- - down filter describing the rows to be returned. + data to be read, such as a list of columns or a + push-down filter describing the rows to be returned. A particular row can be read by at most one stream. 
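
Apropos of the read-session behavior described here: the `trace_id` field added to the v1beta2 `ReadSession` message at the top of this diff gives callers a free-form label for sessions. A minimal sketch, assuming the top-level re-exports used by the generated samples in this diff; the project and table paths and the trace value are illustrative:

from google.cloud import bigquery_storage_v1beta2

client = bigquery_storage_v1beta2.BigQueryReadClient()
session = bigquery_storage_v1beta2.ReadSession(
    table="projects/my-project/datasets/my_dataset/tables/my_table",  # illustrative
    data_format=bigquery_storage_v1beta2.DataFormat.AVRO,
    trace_id="my-pipeline/2022-02-22",  # new in this change; free-form label
)
response = client.create_read_session(
    parent="projects/my-project",  # illustrative
    read_session=session,
    max_stream_count=1,
)
print(response.name)
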
When the caller has reached the end of each stream in the diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/async_client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/async_client.py index 6b72a59a..33ecaa63 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/async_client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Optional, AsyncIterable, Awaitable, AsyncIterator, @@ -122,6 +123,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BigQueryWriteClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> BigQueryWriteTransport: """Returns the transport used by the client instance. @@ -200,6 +237,26 @@ async def create_write_stream( stream is considered committed as soon as an acknowledgement is received. + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateWriteStreamRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest, dict]): The request object. Request message for @@ -231,7 +288,7 @@ async def create_write_stream( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, write_stream]) if request is not None and has_flattened_params: @@ -310,6 +367,37 @@ def append_rows( If the stream is of ``PENDING`` type, data will only be available for read operations after the stream is committed. + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_append_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.AppendRowsRequest( + write_stream="write_stream_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1beta2.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.append_rows(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + Args: requests (AsyncIterator[`google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest`]): The request object AsyncIterator. Request message for `AppendRows`. @@ -363,6 +451,25 @@ async def get_write_stream( ) -> stream.WriteStream: r"""Gets a write stream. + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.GetWriteStreamRequest( + name="name_value", + ) + + # Make the request + response = client.get_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest, dict]): The request object. Request message for @@ -388,7 +495,7 @@ async def get_write_stream( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -446,6 +553,26 @@ async def finalize_write_stream( r"""Finalize a write stream so that no new data can be appended to the stream. Finalize is not supported on the '_default' stream. + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest( + name="name_value", + ) + + # Make the request + response = client.finalize_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest, dict]): The request object. Request message for invoking @@ -468,7 +595,7 @@ async def finalize_write_stream( Response message for FinalizeWriteStream. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -529,6 +656,27 @@ async def batch_commit_write_streams( committed, data in the stream becomes available for read operations. 
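
The API-key plumbing added to the client constructors in this diff is driven through `client_options`. A minimal sketch, assuming versions of google-api-core and google-auth new enough to carry `ClientOptions.api_key` and `get_api_key_credentials` (the checks in this diff degrade gracefully otherwise); the key value is a placeholder:

from google.api_core.client_options import ClientOptions
from google.cloud import bigquery_storage_v1beta2

options = ClientOptions(api_key="PLACEHOLDER_API_KEY")  # placeholder, not a real key
client = bigquery_storage_v1beta2.BigQueryWriteClient(client_options=options)
# Note: supplying explicit credentials together with api_key now raises ValueError,
# per the mutual-exclusion check added in this change.
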
+ + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=['write_streams_value_1', 'write_streams_value_2'], + ) + + # Make the request + response = client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest, dict]): The request object. Request message for @@ -552,7 +700,7 @@ async def batch_commit_write_streams( Response message for BatchCommitWriteStreams. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -614,6 +762,26 @@ async def flush_rows( offset specified in the request. Flush is not supported on the \_default stream, since it is not BUFFERED. + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FlushRowsRequest( + write_stream="write_stream_value", + ) + + # Make the request + response = client.flush_rows(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest, dict]): The request object. Request message for `FlushRows`. @@ -635,7 +803,7 @@ async def flush_rows( Respond message for FlushRows. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([write_stream]) if request is not None and has_flattened_params: diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py index 267809b5..3617a7dc 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py @@ -253,6 +253,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -303,57 +370,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use.
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, BigQueryWriteTransport): # transport is a BigQueryWriteTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -365,6 +397,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, @@ -395,6 +436,27 @@ def create_write_stream( stream is considered committed as soon as an acknowledgement is received. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateWriteStreamRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest, dict]): The request object. Request message for @@ -426,7 +488,7 @@ def create_write_stream( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, write_stream]) if request is not None and has_flattened_params: @@ -494,6 +556,38 @@ def append_rows( If the stream is of ``PENDING`` type, data will only be available for read operations after the stream is committed. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_append_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.AppendRowsRequest( + write_stream="write_stream_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1beta2.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.append_rows(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + Args: requests (Iterator[google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest]): The request object iterator. Request message for `AppendRows`. @@ -533,6 +627,26 @@ def get_write_stream( ) -> stream.WriteStream: r"""Gets a write stream. + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.GetWriteStreamRequest( + name="name_value", + ) + + # Make the request + response = client.get_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest, dict]): The request object. Request message for @@ -558,7 +672,7 @@ def get_write_stream( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -606,6 +720,27 @@ def finalize_write_stream( r"""Finalize a write stream so that no new data can be appended to the stream. Finalize is not supported on the '_default' stream. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest( + name="name_value", + ) + + # Make the request + response = client.finalize_write_stream(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest, dict]): The request object. Request message for invoking @@ -628,7 +763,7 @@ def finalize_write_stream( Response message for FinalizeWriteStream. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -679,6 +814,28 @@ def batch_commit_write_streams( committed, data in the stream becomes available for read operations. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=['write_streams_value_1', 'write_streams_value_2'], + ) + + # Make the request + response = client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest, dict]): The request object. Request message for @@ -702,7 +859,7 @@ def batch_commit_write_streams( Response message for BatchCommitWriteStreams. 
""" # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -756,6 +913,27 @@ def flush_rows( offset specified in the request. Flush is not supported on the \_default stream, since it is not BUFFERED. + + + .. code-block:: + + from google.cloud import bigquery_storage_v1beta2 + + def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FlushRowsRequest( + write_stream="write_stream_value", + ) + + # Make the request + response = client.flush_rows(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest, dict]): The request object. Request message for `FlushRows`. @@ -777,7 +955,7 @@ def flush_rows( Respond message for FlushRows. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([write_stream]) if request is not None and has_flattened_params: diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc.py index 6dbf64ab..567a8256 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc.py @@ -160,8 +160,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py index fc5eba8b..14f10ad4 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py @@ -205,8 +205,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/bigquery_storage_v1beta2/types/arrow.py b/google/cloud/bigquery_storage_v1beta2/types/arrow.py index 34591509..7c164bab 100644 --- a/google/cloud/bigquery_storage_v1beta2/types/arrow.py +++ b/google/cloud/bigquery_storage_v1beta2/types/arrow.py @@ -26,8 +26,7 @@ class ArrowSchema(proto.Message): r"""Arrow schema as specified in https://arrow.apache.org/docs/python/api/datatypes.html and serialized to bytes using IPC: - https://arrow.apache.org/docs/format/Columnar.html#serialization- - and-interprocess-communication-ipc + https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc See code samples on how this message can be deserialized. Attributes: diff --git a/google/cloud/bigquery_storage_v1beta2/types/protobuf.py b/google/cloud/bigquery_storage_v1beta2/types/protobuf.py index 7810c251..3d0df307 100644 --- a/google/cloud/bigquery_storage_v1beta2/types/protobuf.py +++ b/google/cloud/bigquery_storage_v1beta2/types/protobuf.py @@ -34,8 +34,7 @@ class ProtoSchema(proto.Message): has to be self contained, including all the nested types, excepted for proto buffer well known types - (https://developers.google.com/protocol- - buffers/docs/reference/google.protobuf). + (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf). """ proto_descriptor = proto.Field( @@ -50,9 +49,10 @@ class ProtoRows(proto.Message): serialized_rows (Sequence[bytes]): A sequence of rows serialized as a Protocol Buffer. - See https://developers.google.com/protocol- - buffers/docs/overview for more information on - deserializing this field. + See + https://developers.google.com/protocol-buffers/docs/overview + for more information on deserializing this + field. """ serialized_rows = proto.RepeatedField(proto.BYTES, number=1,) diff --git a/noxfile.py b/noxfile.py index 9e0e39e3..25887b3b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -182,7 +182,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
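
On the `ArrowSchema` docstring cleaned up above: the message carries an IPC-serialized Arrow schema, and pyarrow can deserialize it directly. A minimal round-trip sketch in pure pyarrow, standing in for the bytes that `ReadSession.arrow_schema.serialized_schema` would carry:

import pyarrow as pa

# Serialize a schema to IPC bytes, then read it back -- the same shape of
# payload that ArrowSchema.serialized_schema contains.
schema = pa.schema([("name", pa.string()), ("age", pa.int64())])
serialized = schema.serialize()
restored = pa.ipc.read_schema(serialized)
assert restored.equals(schema)
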
""" session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=95") + session.run("coverage", "report", "--show-missing", "--fail-under=98") session.run("coverage", "erase") diff --git a/owlbot.py b/owlbot.py index 3d6771db..0371e338 100644 --- a/owlbot.py +++ b/owlbot.py @@ -136,12 +136,14 @@ unit_test_extras=unit_test_extras, system_test_extras=extras, system_test_external_dependencies=["google-cloud-bigquery"], - cov_level=95, + cov_level=98, ) s.move( templated_files, excludes=[".coveragerc"] ) # microgenerator has a good .coveragerc file +# Work around bug in templates https://github.com/googleapis/synthtool/pull/1335 +s.replace(".github/workflows/unittest.yml", "--fail-under=100", "--fail-under=98") # ---------------------------------------------------------------------------- # Samples templates @@ -149,51 +151,4 @@ python.py_samples(skip_readmes=True) -# Remove the replacements below once -# https://github.com/googleapis/synthtool/pull/1188 is merged - -# Update googleapis/repo-automation-bots repo to main in .kokoro/*.sh files -s.replace( - ".kokoro/*.sh", - "repo-automation-bots/tree/master", - "repo-automation-bots/tree/main", -) - -# Customize CONTRIBUTING.rst to replace master with main -s.replace( - "CONTRIBUTING.rst", - "fetch and merge changes from upstream into master", - "fetch and merge changes from upstream into main", -) - -s.replace( - "CONTRIBUTING.rst", "git merge upstream/master", "git merge upstream/main", -) - -s.replace( - "CONTRIBUTING.rst", - """export GOOGLE_CLOUD_TESTING_BRANCH=\"master\"""", - """export GOOGLE_CLOUD_TESTING_BRANCH=\"main\"""", -) - -s.replace( - "CONTRIBUTING.rst", "remote \\(``master``\\)", "remote (``main``)", -) - -s.replace( - "CONTRIBUTING.rst", "blob/master/CONTRIBUTING.rst", "blob/main/CONTRIBUTING.rst", -) - -s.replace( - "CONTRIBUTING.rst", "blob/master/noxfile.py", "blob/main/noxfile.py", -) - -s.replace( - "docs/conf.py", "master_doc", "root_doc", -) - -s.replace( - "docs/conf.py", "# The master toctree document.", "# The root toctree document.", -) - s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py new file mode 100644 index 00000000..f6af0cb9 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReadSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateReadSessionRequest(parent="parent_value",) + + # Make the request + response = await client.create_read_session(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py new file mode 100644 index 00000000..a9147ca7 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReadSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateReadSessionRequest(parent="parent_value",) + + # Make the request + response = client.create_read_session(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py new file mode 100644 index 00000000..790378a2 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReadRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_read_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.ReadRowsRequest(read_stream="read_stream_value",) + + # Make the request + stream = await client.read_rows(request=request) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py new file mode 100644 index 00000000..bab32a0b --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReadRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_read_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.ReadRowsRequest(read_stream="read_stream_value",) + + # Make the request + stream = client.read_rows(request=request) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py new file mode 100644 index 00000000..104b1ab5 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SplitReadStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.SplitReadStreamRequest(name="name_value",) + + # Make the request + response = await client.split_read_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py new file mode 100644 index 00000000..2f46b2a6 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SplitReadStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.SplitReadStreamRequest(name="name_value",) + + # Make the request + response = client.split_read_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py new file mode 100644 index 00000000..9d104026 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AppendRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_append_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.AppendRowsRequest(write_stream="write_stream_value",) + + # This method expects an iterator which contains + # 'bigquery_storage_v1.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.append_rows(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py new file mode 100644 index 00000000..40fae911 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AppendRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_append_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.AppendRowsRequest(write_stream="write_stream_value",) + + # This method expects an iterator which contains + # 'bigquery_storage_v1.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.append_rows(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py new file mode 100644 index 00000000..d6fd00aa --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCommitWriteStreams +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=["write_streams_value_1", "write_streams_value_2"], + ) + + # Make the request + response = await client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py new file mode 100644 index 00000000..b8f877bd --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCommitWriteStreams +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
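+# BatchCommitWriteStreams atomically commits finalized PENDING-type write
+# streams; the write_streams values in the request below are placeholders
+# for real stream resource names.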
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=["write_streams_value_1", "write_streams_value_2"], + ) + + # Make the request + response = client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py new file mode 100644 index 00000000..fbdb1a39 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateWriteStreamRequest(parent="parent_value",) + + # Make the request + response = await client.create_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py new file mode 100644 index 00000000..dcd11631 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateWriteStreamRequest(parent="parent_value",) + + # Make the request + response = client.create_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py new file mode 100644 index 00000000..c3b8abb5 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinalizeWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
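+# FinalizeWriteStream prevents any further appends to the stream; rows
+# that were already appended are unaffected.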
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FinalizeWriteStreamRequest(name="name_value",) + + # Make the request + response = await client.finalize_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py new file mode 100644 index 00000000..69c14a0e --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinalizeWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FinalizeWriteStreamRequest(name="name_value",) + + # Make the request + response = client.finalize_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py new file mode 100644 index 00000000..6d82da59 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FlushRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FlushRowsRequest(write_stream="write_stream_value",) + + # Make the request + response = await client.flush_rows(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py new file mode 100644 index 00000000..15862b8c --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FlushRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
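+# FlushRows applies to BUFFERED-type streams: rows up to a given offset
+# become visible for reading. A production request would typically also
+# set the `offset` field; only the stream name is shown below.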
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FlushRowsRequest(write_stream="write_stream_value",) + + # Make the request + response = client.flush_rows(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py new file mode 100644 index 00000000..274003a9 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.GetWriteStreamRequest(name="name_value",) + + # Make the request + response = await client.get_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py new file mode 100644 index 00000000..16727369 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.GetWriteStreamRequest(name="name_value",) + + # Make the request + response = client.get_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py new file mode 100644 index 00000000..f16b850f --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReadSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
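+# Only the required parent project is populated below; a runnable request
+# would also set `read_session` (at minimum the source table to read).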
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateReadSessionRequest(parent="parent_value",) + + # Make the request + response = await client.create_read_session(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py new file mode 100644 index 00000000..bbc936df --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReadSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateReadSessionRequest(parent="parent_value",) + + # Make the request + response = client.create_read_session(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py new file mode 100644 index 00000000..3b13b97a --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReadRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_read_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.ReadRowsRequest(read_stream="read_stream_value",) + + # Make the request + stream = await client.read_rows(request=request) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py new file mode 100644 index 00000000..9dd12df4 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReadRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
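+# The read_stream value below stands in for a stream name returned by a
+# prior CreateReadSession call; an `offset` can be passed to resume
+# reading partway through the stream.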
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_read_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.ReadRowsRequest(read_stream="read_stream_value",) + + # Make the request + stream = client.read_rows(request=request) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py new file mode 100644 index 00000000..b3c9fbb1 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SplitReadStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.SplitReadStreamRequest(name="name_value",) + + # Make the request + response = await client.split_read_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py new file mode 100644 index 00000000..0d81457a --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SplitReadStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.SplitReadStreamRequest(name="name_value",) + + # Make the request + response = client.split_read_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py new file mode 100644 index 00000000..bda4f37b --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AppendRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_append_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.AppendRowsRequest( + write_stream="write_stream_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1beta2.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
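+    # As in the v1 sample, a production request would also populate
+    # `proto_rows` (serialized rows plus, on the first request, the
+    # writer schema); the bare request above is illustrative only.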
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.append_rows(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py new file mode 100644 index 00000000..a8f5596d --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AppendRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_append_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.AppendRowsRequest( + write_stream="write_stream_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1beta2.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.append_rows(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py new file mode 100644 index 00000000..dace7ac7 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCommitWriteStreams +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=["write_streams_value_1", "write_streams_value_2"], + ) + + # Make the request + response = await client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py new file mode 100644 index 00000000..6113c6b4 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCommitWriteStreams +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
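+# The write_streams listed in the request must name streams that belong
+# to the same parent table and have already been finalized.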
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=["write_streams_value_1", "write_streams_value_2"], + ) + + # Make the request + response = client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py new file mode 100644 index 00000000..7645b9fe --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
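+# Only the required parent table is shown; a complete request would also
+# set `write_stream`, for example choosing its type (such as PENDING).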
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateWriteStreamRequest(parent="parent_value",) + + # Make the request + response = await client.create_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py new file mode 100644 index 00000000..d97ca878 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateWriteStreamRequest(parent="parent_value",) + + # Make the request + response = client.create_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py new file mode 100644 index 00000000..ca26d51e --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinalizeWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest(name="name_value",) + + # Make the request + response = await client.finalize_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py new file mode 100644 index 00000000..71501df1 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinalizeWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
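+# Finalizing is a prerequisite for committing a PENDING-type stream with
+# BatchCommitWriteStreams.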
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest(name="name_value",) + + # Make the request + response = client.finalize_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py new file mode 100644 index 00000000..8fa8bb25 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FlushRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FlushRowsRequest( + write_stream="write_stream_value", + ) + + # Make the request + response = await client.flush_rows(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py new file mode 100644 index 00000000..569139e2 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FlushRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FlushRowsRequest( + write_stream="write_stream_value", + ) + + # Make the request + response = client.flush_rows(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_sync] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py new file mode 100644 index 00000000..2ef47a39 --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
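+# The name value is a placeholder for a full stream resource name of the
+# form projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}.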
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.GetWriteStreamRequest(name="name_value",) + + # Make the request + response = await client.get_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_async] diff --git a/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py new file mode 100644 index 00000000..8a0f081a --- /dev/null +++ b/samples/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
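+# GetWriteStream returns the stream's metadata, such as its type and
+# creation time.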
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.GetWriteStreamRequest(name="name_value",) + + # Make the request + response = client.get_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_sync] diff --git a/samples/generated_samples/snippet_metadata_bigquery storage_v1.json b/samples/generated_samples/snippet_metadata_bigquery storage_v1.json new file mode 100644 index 00000000..a47e9843 --- /dev/null +++ b/samples/generated_samples/snippet_metadata_bigquery storage_v1.json @@ -0,0 +1,805 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py", + "regionTag": 
"bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + 
}, + "shortName": "BatchCommitWriteStreams" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { 
+ "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/samples/generated_samples/snippet_metadata_bigquery storage_v1beta2.json b/samples/generated_samples/snippet_metadata_bigquery storage_v1beta2.json new file mode 100644 index 00000000..3ea6be83 --- /dev/null +++ b/samples/generated_samples/snippet_metadata_bigquery storage_v1beta2.json @@ -0,0 +1,805 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + 
"start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_async", + "segments": [ + { + "end": 45, + 
"start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, 
+ "shortName": "FinalizeWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + 
"start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/samples/quickstart/noxfile.py b/samples/quickstart/noxfile.py index 3bbef5d5..20cdfc62 100644 --- a/samples/quickstart/noxfile.py +++ b/samples/quickstart/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: diff --git a/samples/snippets/customer_record.proto b/samples/snippets/customer_record.proto index 06142c3b..6c79336b 100644 --- a/samples/snippets/customer_record.proto +++ b/samples/snippets/customer_record.proto @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +// [START bigquerystorage_append_rows_pending_customer_record] // The BigQuery Storage API expects protocol buffer data to be encoded in the // proto2 wire format. This allows it to disambiguate missing optional fields // from default values without the need for wrapper types. @@ -26,3 +27,4 @@ message CustomerRecord { // Use the required keyword for client-side validation of required fields. required int64 row_num = 2; } +// [END bigquerystorage_append_rows_pending_customer_record] diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 3bbef5d5..20cdfc62 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: diff --git a/samples/to_dataframe/noxfile.py b/samples/to_dataframe/noxfile.py index 3bbef5d5..20cdfc62 100644 --- a/samples/to_dataframe/noxfile.py +++ b/samples/to_dataframe/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: diff --git a/setup.py b/setup.py index fe6f5f89..ba3f3b15 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-bigquery-storage" description = "BigQuery Storage API API client library" -version = "2.11.0" +version = "2.12.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # NOTE: Maintainers, please do not require google-api-core>=2.x.x @@ -29,7 +29,6 @@ # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.28.0, <3.0.0dev", "proto-plus >= 1.18.0", - "libcst >= 0.2.5", ] extras = { "pandas": ["pandas>=0.21.1"], diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py index e22cc909..616eef74 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py @@ -389,6 +389,83 @@ def test_big_query_read_client_mtls_env_auto( ) +@pytest.mark.parametrize("client_class", [BigQueryReadClient, BigQueryReadAsyncClient]) +@mock.patch.object( + BigQueryReadClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BigQueryReadClient) +) +@mock.patch.object( + BigQueryReadAsyncClient, + "DEFAULT_ENDPOINT", + 
modify_default_endpoint(BigQueryReadAsyncClient), +) +def test_big_query_read_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
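+ # Under "auto" the helper falls back to certificate discovery: when a
+ # default client cert source is present, it should return the mTLS
+ # endpoint together with that cert source, in contrast to the branch
+ # above where no default cert exists.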
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -421,21 +498,28 @@ def test_big_query_read_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (BigQueryReadClient, transports.BigQueryReadGrpcTransport, "grpc"), + ( + BigQueryReadClient, + transports.BigQueryReadGrpcTransport, + "grpc", + grpc_helpers, + ), ( BigQueryReadAsyncClient, transports.BigQueryReadGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_big_query_read_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -469,6 +553,75 @@ def test_big_query_read_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BigQueryReadClient, + transports.BigQueryReadGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BigQueryReadAsyncClient, + transports.BigQueryReadGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_big_query_read_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
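+ # The patches below fake both credential loaders and capture
+ # grpc_helpers.create_channel, so the assertions can check that the
+ # file-based credentials (rather than ADC) reach the channel, along
+ # with the read client's default scopes and endpoint.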
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigquerystorage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=None, + default_host="bigquerystorage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [storage.CreateReadSessionRequest, dict,]) def test_create_read_session(request_type, transport: str = "grpc"): client = BigQueryReadClient( @@ -489,6 +642,7 @@ def test_create_read_session(request_type, transport: str = "grpc"): data_format=stream.DataFormat.AVRO, table="table_value", estimated_total_bytes_scanned=3076, + trace_id="trace_id_value", avro_schema=avro.AvroSchema(schema="schema_value"), ) response = client.create_read_session(request) @@ -504,6 +658,7 @@ def test_create_read_session(request_type, transport: str = "grpc"): assert response.data_format == stream.DataFormat.AVRO assert response.table == "table_value" assert response.estimated_total_bytes_scanned == 3076 + assert response.trace_id == "trace_id_value" def test_create_read_session_empty_call(): @@ -546,6 +701,7 @@ async def test_create_read_session_async( data_format=stream.DataFormat.AVRO, table="table_value", estimated_total_bytes_scanned=3076, + trace_id="trace_id_value", ) ) response = await client.create_read_session(request) @@ -561,6 +717,7 @@ async def test_create_read_session_async( assert response.data_format == stream.DataFormat.AVRO assert response.table == "table_value" assert response.estimated_total_bytes_scanned == 3076 + assert response.trace_id == "trace_id_value" @pytest.mark.asyncio @@ -1081,6 +1238,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.BigQueryReadGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BigQueryReadClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BigQueryReadClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
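+ # (A ready-made transport already carries its own credentials and
+ # scopes; mixing it with separately supplied scopes raises ValueError,
+ # just like the api_key combinations checked above.)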
transport = transports.BigQueryReadGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -1187,7 +1361,6 @@ def test_big_query_read_base_transport_with_credentials_file(): scopes=None, default_scopes=( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ), quota_project_id="octopus", @@ -1214,7 +1387,6 @@ def test_big_query_read_auth_adc(): scopes=None, default_scopes=( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ), quota_project_id=None, @@ -1238,7 +1410,6 @@ def test_big_query_read_transport_auth_adc(transport_class): scopes=["1", "2"], default_scopes=( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ), quota_project_id="octopus", @@ -1271,7 +1442,6 @@ def test_big_query_read_transport_create_channel(transport_class, grpc_helpers): quota_project_id="octopus", default_scopes=( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ), scopes=["1", "2"], @@ -1697,3 +1867,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BigQueryReadClient, transports.BigQueryReadGrpcTransport), + (BigQueryReadAsyncClient, transports.BigQueryReadGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py index c63b1eac..9b6af1bc 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py @@ -405,6 +405,87 @@ def test_big_query_write_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [BigQueryWriteClient, BigQueryWriteAsyncClient] +) +@mock.patch.object( + BigQueryWriteClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BigQueryWriteClient), +) +@mock.patch.object( + BigQueryWriteAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BigQueryWriteAsyncClient), +) +def test_big_query_write_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
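+ # GOOGLE_API_USE_CLIENT_CERTIFICATE ("true"/"false") controls whether a
+ # client certificate may be used at all, while GOOGLE_API_USE_MTLS_ENDPOINT
+ # ("always", "never", or the default "auto") selects between the regular
+ # and mTLS endpoints; the cases below walk through those combinations.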
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -437,21 +518,28 @@ def test_big_query_write_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (BigQueryWriteClient, transports.BigQueryWriteGrpcTransport, "grpc"), + ( + BigQueryWriteClient, + transports.BigQueryWriteGrpcTransport, + "grpc", + grpc_helpers, + ), ( BigQueryWriteAsyncClient, transports.BigQueryWriteGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_big_query_write_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. 
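# With the transport constructor patched out, this verifies that the client
# forwards credentials_file verbatim (credentials=None) and leaves the
# actual loading to the transport.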
options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -487,6 +575,76 @@ def test_big_query_write_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BigQueryWriteClient, + transports.BigQueryWriteGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BigQueryWriteAsyncClient, + transports.BigQueryWriteGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_big_query_write_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigquerystorage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/bigquery.insertdata", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=None, + default_host="bigquerystorage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [storage.CreateWriteStreamRequest, dict,]) def test_create_write_stream(request_type, transport: str = "grpc"): client = BigQueryWriteClient( @@ -1652,6 +1810,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.BigQueryWriteGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BigQueryWriteClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BigQueryWriteClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.BigQueryWriteGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2258,3 +2433,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BigQueryWriteClient, transports.BigQueryWriteGrpcTransport), + (BigQueryWriteAsyncClient, transports.BigQueryWriteGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py index fca4bb24..d01474c7 100644 --- a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py @@ -391,6 +391,83 @@ def test_big_query_read_client_mtls_env_auto( ) +@pytest.mark.parametrize("client_class", [BigQueryReadClient, BigQueryReadAsyncClient]) +@mock.patch.object( + BigQueryReadClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BigQueryReadClient) +) +@mock.patch.object( + BigQueryReadAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BigQueryReadAsyncClient), +) +def test_big_query_read_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". 
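+ # "always" forces the mTLS endpoint even when no client certificate is
+ # configured, mirroring how "never" above pins the regular endpoint.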
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -423,21 +500,28 @@ def test_big_query_read_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (BigQueryReadClient, transports.BigQueryReadGrpcTransport, "grpc"), + ( + BigQueryReadClient, + transports.BigQueryReadGrpcTransport, + "grpc", + grpc_helpers, + ), ( BigQueryReadAsyncClient, transports.BigQueryReadGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_big_query_read_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -471,6 +555,75 @@ def test_big_query_read_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BigQueryReadClient, + transports.BigQueryReadGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BigQueryReadAsyncClient, + transports.BigQueryReadGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_big_query_read_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigquerystorage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=None, + default_host="bigquerystorage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [storage.CreateReadSessionRequest, dict,]) def test_create_read_session(request_type, transport: str = "grpc"): client = BigQueryReadClient( @@ -1079,6 +1232,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.BigQueryReadGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BigQueryReadClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BigQueryReadClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.BigQueryReadGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -1185,7 +1355,6 @@ def test_big_query_read_base_transport_with_credentials_file(): scopes=None, default_scopes=( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ), quota_project_id="octopus", @@ -1212,7 +1381,6 @@ def test_big_query_read_auth_adc(): scopes=None, default_scopes=( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ), quota_project_id=None, @@ -1236,7 +1404,6 @@ def test_big_query_read_transport_auth_adc(transport_class): scopes=["1", "2"], default_scopes=( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ), quota_project_id="octopus", @@ -1269,7 +1436,6 @@ def test_big_query_read_transport_create_channel(transport_class, grpc_helpers): quota_project_id="octopus", default_scopes=( "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ), scopes=["1", "2"], @@ -1695,3 +1861,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BigQueryReadClient, transports.BigQueryReadGrpcTransport), + (BigQueryReadAsyncClient, transports.BigQueryReadGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py index d58b5558..68a1ba98 100644 --- a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py +++ b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py @@ -405,6 +405,87 @@ def test_big_query_write_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [BigQueryWriteClient, BigQueryWriteAsyncClient] +) +@mock.patch.object( + BigQueryWriteClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BigQueryWriteClient), +) +@mock.patch.object( + BigQueryWriteAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BigQueryWriteAsyncClient), +) +def test_big_query_write_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
diff --git a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py
index d58b5558..68a1ba98 100644
--- a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py
+++ b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py
@@ -405,6 +405,87 @@ def test_big_query_write_client_mtls_env_auto(
         )
 
 
+@pytest.mark.parametrize(
+    "client_class", [BigQueryWriteClient, BigQueryWriteAsyncClient]
+)
+@mock.patch.object(
+    BigQueryWriteClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(BigQueryWriteClient),
+)
+@mock.patch.object(
+    BigQueryWriteAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(BigQueryWriteAsyncClient),
+)
+def test_big_query_write_client_get_mtls_endpoint_and_cert_source(client_class):
+    mock_client_cert_source = mock.Mock()
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(
+            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
+        )
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
+            options
+        )
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source == mock_client_cert_source
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        mock_client_cert_source = mock.Mock()
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(
+            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
+        )
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
+            options
+        )
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch(
+            "google.auth.transport.mtls.has_default_client_cert_source",
+            return_value=False,
+        ):
+            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+            assert api_endpoint == client_class.DEFAULT_ENDPOINT
+            assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch(
+            "google.auth.transport.mtls.has_default_client_cert_source",
+            return_value=True,
+        ):
+            with mock.patch(
+                "google.auth.transport.mtls.default_client_cert_source",
+                return_value=mock_client_cert_source,
+            ):
+                (
+                    api_endpoint,
+                    cert_source,
+                ) = client_class.get_mtls_endpoint_and_cert_source()
+                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+                assert cert_source == mock_client_cert_source
+
+
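# --- Editor's sketch (not part of the diff): the new mtls test above fixes the
# behavior of get_mtls_endpoint_and_cert_source as a decision table over two
# environment variables. A condensed restatement of the logic its assertions pin
# down (simplified; the generated method also honors client_options, as the
# first two cases show):

import os

def resolve_endpoint(default_endpoint, mtls_endpoint, has_default_cert):
    use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    use_cert = os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if use_mtls == "never":
        return default_endpoint, None
    if use_mtls == "always":
        return mtls_endpoint, None
    # "auto": switch to mTLS only when a client certificate is actually available.
    if use_cert == "true" and has_default_cert:
        return mtls_endpoint, "default client cert source"
    return default_endpoint, None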
 @pytest.mark.parametrize(
     "client_class,transport_class,transport_name",
     [
@@ -437,21 +518,28 @@ def test_big_query_write_client_client_options_scopes(
 
 
 @pytest.mark.parametrize(
-    "client_class,transport_class,transport_name",
+    "client_class,transport_class,transport_name,grpc_helpers",
     [
-        (BigQueryWriteClient, transports.BigQueryWriteGrpcTransport, "grpc"),
+        (
+            BigQueryWriteClient,
+            transports.BigQueryWriteGrpcTransport,
+            "grpc",
+            grpc_helpers,
+        ),
         (
             BigQueryWriteAsyncClient,
             transports.BigQueryWriteGrpcAsyncIOTransport,
             "grpc_asyncio",
+            grpc_helpers_async,
         ),
     ],
 )
 def test_big_query_write_client_client_options_credentials_file(
-    client_class, transport_class, transport_name
+    client_class, transport_class, transport_name, grpc_helpers
 ):
     # Check the case credentials file is provided.
     options = client_options.ClientOptions(credentials_file="credentials.json")
+
     with mock.patch.object(transport_class, "__init__") as patched:
         patched.return_value = None
         client = client_class(client_options=options, transport=transport_name)
@@ -487,6 +575,76 @@ def test_big_query_write_client_client_options_from_dict():
     )
 
 
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,grpc_helpers",
+    [
+        (
+            BigQueryWriteClient,
+            transports.BigQueryWriteGrpcTransport,
+            "grpc",
+            grpc_helpers,
+        ),
+        (
+            BigQueryWriteAsyncClient,
+            transports.BigQueryWriteGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            grpc_helpers_async,
+        ),
+    ],
+)
+def test_big_query_write_client_create_channel_credentials_file(
+    client_class, transport_class, transport_name, grpc_helpers
+):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(credentials_file="credentials.json")
+
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+        )
+
+    # test that the credentials from file are saved and used as the credentials.
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "bigquerystorage.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                "https://www.googleapis.com/auth/bigquery",
+                "https://www.googleapis.com/auth/bigquery.insertdata",
+                "https://www.googleapis.com/auth/cloud-platform",
+            ),
+            scopes=None,
+            default_host="bigquerystorage.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
 @pytest.mark.parametrize("request_type", [storage.CreateWriteStreamRequest, dict,])
 def test_create_write_stream(request_type, transport: str = "grpc"):
     client = BigQueryWriteClient(
@@ -1640,6 +1798,23 @@ def test_credentials_transport_error():
             transport=transport,
         )
 
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.BigQueryWriteGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = BigQueryWriteClient(client_options=options, transport=transport,)
+
+    # It is an error to provide an api_key and a credential.
+    options = mock.Mock()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = BigQueryWriteClient(
+            client_options=options, credentials=ga_credentials.AnonymousCredentials()
+        )
+
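# --- Editor's sketch (not part of the diff): the write-side create_channel test
# above documents the default OAuth scopes the write transport requests -- note
# bigquery.insertdata alongside bigquery and cloud-platform, while the read-side
# hunks earlier in this diff drop bigquery.readonly. Roughly equivalent manual
# channel setup using the public google-auth and api-core APIs:

import google.auth
from google.api_core import grpc_helpers

credentials, _ = google.auth.default(
    scopes=(
        "https://www.googleapis.com/auth/bigquery",
        "https://www.googleapis.com/auth/bigquery.insertdata",
        "https://www.googleapis.com/auth/cloud-platform",
    )
)
channel = grpc_helpers.create_channel(
    "bigquerystorage.googleapis.com:443",
    credentials=credentials,
    # Lift gRPC's default 4 MiB message-size caps, as the transport does.
    options=[
        ("grpc.max_send_message_length", -1),
        ("grpc.max_receive_message_length", -1),
    ],
)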
     # It is an error to provide scopes and a transport instance.
     transport = transports.BigQueryWriteGrpcTransport(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -2246,3 +2421,33 @@ def test_client_ctx():
         with client:
             pass
         close.assert_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (BigQueryWriteClient, transports.BigQueryWriteGrpcTransport),
+        (BigQueryWriteAsyncClient, transports.BigQueryWriteGrpcAsyncIOTransport),
+    ],
+)
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+            )
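# --- Editor's sketch (not part of the diff): the write-side api_key tests mirror
# the read-side ones -- an API key can replace ADC but never accompany explicit
# credentials or a prebuilt transport. A minimal reproduction of the ValueError
# cases asserted above (client import path assumed from this repo's layout):

import pytest
from google.api_core import client_options
from google.auth import credentials as ga_credentials
from google.cloud.bigquery_storage_v1beta2 import BigQueryWriteClient

options = client_options.ClientOptions()
options.api_key = "api_key"  # placeholder value, as in the tests

with pytest.raises(ValueError):
    # API key plus explicit credentials is rejected.
    BigQueryWriteClient(
        client_options=options,
        credentials=ga_credentials.AnonymousCredentials(),
    )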