From 9d46e36339c390c080e96f3bec83b6df6bc02c7d Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 26 Aug 2021 21:47:06 +0000 Subject: [PATCH 1/3] feat: Added support for logs-based alerts: https://cloud.google.com/logging/docs/alerting/log-based-alerts feat: Added support for user-defined labels on cloud monitoring's Service and ServiceLevelObjective objects fix!: mark required fields in QueryTimeSeriesRequest as required PiperOrigin-RevId: 393200011 Source-Link: https://github.com/googleapis/googleapis/commit/354d0763e7e4ce7ab370790ffba0c3c25b1420c3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/5791e7bec19d247399d33396b4c1212f5208c505 --- owl-bot-staging/v3/.coveragerc | 17 + owl-bot-staging/v3/MANIFEST.in | 2 + owl-bot-staging/v3/README.rst | 49 + owl-bot-staging/v3/docs/conf.py | 376 ++ owl-bot-staging/v3/docs/index.rst | 7 + .../monitoring_v3/alert_policy_service.rst | 10 + .../v3/docs/monitoring_v3/group_service.rst | 10 + .../v3/docs/monitoring_v3/metric_service.rst | 10 + .../notification_channel_service.rst | 10 + .../v3/docs/monitoring_v3/query_service.rst | 10 + .../service_monitoring_service.rst | 10 + .../v3/docs/monitoring_v3/services.rst | 12 + .../v3/docs/monitoring_v3/types.rst | 7 + .../monitoring_v3/uptime_check_service.rst | 10 + .../v3/google/cloud/monitoring/__init__.py | 237 ++ .../v3/google/cloud/monitoring/py.typed | 2 + .../v3/google/cloud/monitoring_v3/__init__.py | 238 ++ .../cloud/monitoring_v3/gapic_metadata.json | 567 +++ .../v3/google/cloud/monitoring_v3/py.typed | 2 + .../cloud/monitoring_v3/services/__init__.py | 15 + .../services/alert_policy_service/__init__.py | 22 + .../alert_policy_service/async_client.py | 673 +++ .../services/alert_policy_service/client.py | 855 ++++ .../services/alert_policy_service/pagers.py | 141 + .../transports/__init__.py | 33 + .../alert_policy_service/transports/base.py | 246 ++ .../alert_policy_service/transports/grpc.py | 370 ++ .../transports/grpc_asyncio.py | 374 ++ .../services/group_service/__init__.py | 22 + .../services/group_service/async_client.py | 793 ++++ .../services/group_service/client.py | 954 +++++ .../services/group_service/pagers.py | 264 ++ .../group_service/transports/__init__.py | 33 + .../services/group_service/transports/base.py | 273 ++ .../services/group_service/transports/grpc.py | 398 ++ .../group_service/transports/grpc_asyncio.py | 402 ++ .../services/metric_service/__init__.py | 22 + .../services/metric_service/async_client.py | 967 +++++ .../services/metric_service/client.py | 1140 +++++ .../services/metric_service/pagers.py | 387 ++ .../metric_service/transports/__init__.py | 33 + .../metric_service/transports/base.py | 308 ++ .../metric_service/transports/grpc.py | 453 ++ .../metric_service/transports/grpc_asyncio.py | 457 ++ .../notification_channel_service/__init__.py | 22 + .../async_client.py | 1143 +++++ .../notification_channel_service/client.py | 1301 ++++++ .../notification_channel_service/pagers.py | 263 ++ .../transports/__init__.py | 33 + .../transports/base.py | 340 ++ .../transports/grpc.py | 538 +++ .../transports/grpc_asyncio.py | 542 +++ .../services/query_service/__init__.py | 22 + .../services/query_service/async_client.py | 231 + .../services/query_service/client.py | 414 ++ .../services/query_service/pagers.py | 141 + .../query_service/transports/__init__.py | 33 + .../services/query_service/transports/base.py | 170 + .../services/query_service/transports/grpc.py | 255 ++ .../query_service/transports/grpc_asyncio.py | 259 ++ 
.../service_monitoring_service/__init__.py | 22 + .../async_client.py | 1061 +++++ .../service_monitoring_service/client.py | 1225 ++++++ .../service_monitoring_service/pagers.py | 263 ++ .../transports/__init__.py | 33 + .../transports/base.py | 335 ++ .../transports/grpc.py | 492 +++ .../transports/grpc_asyncio.py | 496 +++ .../services/uptime_check_service/__init__.py | 22 + .../uptime_check_service/async_client.py | 686 +++ .../services/uptime_check_service/client.py | 854 ++++ .../services/uptime_check_service/pagers.py | 263 ++ .../transports/__init__.py | 33 + .../uptime_check_service/transports/base.py | 266 ++ .../uptime_check_service/transports/grpc.py | 401 ++ .../transports/grpc_asyncio.py | 405 ++ .../cloud/monitoring_v3/types/__init__.py | 240 ++ .../google/cloud/monitoring_v3/types/alert.py | 654 +++ .../monitoring_v3/types/alert_service.py | 257 ++ .../cloud/monitoring_v3/types/common.py | 333 ++ .../monitoring_v3/types/dropped_labels.py | 59 + .../google/cloud/monitoring_v3/types/group.py | 112 + .../monitoring_v3/types/group_service.py | 346 ++ .../cloud/monitoring_v3/types/metric.py | 417 ++ .../monitoring_v3/types/metric_service.py | 665 +++ .../monitoring_v3/types/mutation_record.py | 50 + .../cloud/monitoring_v3/types/notification.py | 256 ++ .../types/notification_service.py | 445 ++ .../monitoring_v3/types/query_service.py | 25 + .../cloud/monitoring_v3/types/service.py | 775 ++++ .../monitoring_v3/types/service_service.py | 416 ++ .../cloud/monitoring_v3/types/span_context.py | 61 + .../cloud/monitoring_v3/types/uptime.py | 538 +++ .../monitoring_v3/types/uptime_service.py | 269 ++ owl-bot-staging/v3/mypy.ini | 3 + owl-bot-staging/v3/noxfile.py | 132 + .../scripts/fixup_monitoring_v3_keywords.py | 221 + owl-bot-staging/v3/setup.py | 54 + owl-bot-staging/v3/tests/__init__.py | 16 + owl-bot-staging/v3/tests/unit/__init__.py | 16 + .../v3/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/monitoring_v3/__init__.py | 16 + .../test_alert_policy_service.py | 2365 +++++++++++ .../gapic/monitoring_v3/test_group_service.py | 2765 ++++++++++++ .../monitoring_v3/test_metric_service.py | 3511 +++++++++++++++ .../test_notification_channel_service.py | 3757 +++++++++++++++++ .../gapic/monitoring_v3/test_query_service.py | 1264 ++++++ .../test_service_monitoring_service.py | 3703 ++++++++++++++++ .../test_uptime_check_service.py | 2610 ++++++++++++ 109 files changed, 50152 insertions(+) create mode 100644 owl-bot-staging/v3/.coveragerc create mode 100644 owl-bot-staging/v3/MANIFEST.in create mode 100644 owl-bot-staging/v3/README.rst create mode 100644 owl-bot-staging/v3/docs/conf.py create mode 100644 owl-bot-staging/v3/docs/index.rst create mode 100644 owl-bot-staging/v3/docs/monitoring_v3/alert_policy_service.rst create mode 100644 owl-bot-staging/v3/docs/monitoring_v3/group_service.rst create mode 100644 owl-bot-staging/v3/docs/monitoring_v3/metric_service.rst create mode 100644 owl-bot-staging/v3/docs/monitoring_v3/notification_channel_service.rst create mode 100644 owl-bot-staging/v3/docs/monitoring_v3/query_service.rst create mode 100644 owl-bot-staging/v3/docs/monitoring_v3/service_monitoring_service.rst create mode 100644 owl-bot-staging/v3/docs/monitoring_v3/services.rst create mode 100644 owl-bot-staging/v3/docs/monitoring_v3/types.rst create mode 100644 owl-bot-staging/v3/docs/monitoring_v3/uptime_check_service.rst create mode 100644 owl-bot-staging/v3/google/cloud/monitoring/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring/py.typed create mode 
100644 owl-bot-staging/v3/google/cloud/monitoring_v3/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/gapic_metadata.json create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/py.typed create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/pagers.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/async_client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/pagers.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/base.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/async_client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/pagers.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/base.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/pagers.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/__init__.py create mode 100644 
owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/async_client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/pagers.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/base.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/pagers.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/client.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/pagers.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/__init__.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert_service.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/common.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/dropped_labels.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/group.py create mode 100644 
owl-bot-staging/v3/google/cloud/monitoring_v3/types/group_service.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric_service.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/mutation_record.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification_service.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/query_service.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/service.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/service_service.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/span_context.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime.py create mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime_service.py create mode 100644 owl-bot-staging/v3/mypy.ini create mode 100644 owl-bot-staging/v3/noxfile.py create mode 100644 owl-bot-staging/v3/scripts/fixup_monitoring_v3_keywords.py create mode 100644 owl-bot-staging/v3/setup.py create mode 100644 owl-bot-staging/v3/tests/__init__.py create mode 100644 owl-bot-staging/v3/tests/unit/__init__.py create mode 100644 owl-bot-staging/v3/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/__init__.py create mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py create mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_group_service.py create mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_metric_service.py create mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py create mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_query_service.py create mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py create mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py diff --git a/owl-bot-staging/v3/.coveragerc b/owl-bot-staging/v3/.coveragerc new file mode 100644 index 00000000..130673a5 --- /dev/null +++ b/owl-bot-staging/v3/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/monitoring/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v3/MANIFEST.in b/owl-bot-staging/v3/MANIFEST.in new file mode 100644 index 00000000..cc42f1eb --- /dev/null +++ b/owl-bot-staging/v3/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/monitoring *.py +recursive-include google/cloud/monitoring_v3 *.py diff --git a/owl-bot-staging/v3/README.rst b/owl-bot-staging/v3/README.rst new file mode 100644 index 00000000..1f076464 --- /dev/null +++ b/owl-bot-staging/v3/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Monitoring API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Monitoring API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v3/docs/conf.py b/owl-bot-staging/v3/docs/conf.py new file mode 100644 index 00000000..12adfd5b --- /dev/null +++ b/owl-bot-staging/v3/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-monitoring documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-monitoring" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. 
+# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-monitoring-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-monitoring.tex", + u"google-cloud-monitoring Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-monitoring", + u"Google Cloud Monitoring Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-monitoring", + u"google-cloud-monitoring Documentation", + author, + "google-cloud-monitoring", + "GAPIC library for Google Cloud Monitoring API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v3/docs/index.rst b/owl-bot-staging/v3/docs/index.rst new file mode 100644 index 00000000..d0a12177 --- /dev/null +++ b/owl-bot-staging/v3/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + monitoring_v3/services + monitoring_v3/types diff --git a/owl-bot-staging/v3/docs/monitoring_v3/alert_policy_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/alert_policy_service.rst new file mode 100644 index 00000000..a11da806 --- /dev/null +++ b/owl-bot-staging/v3/docs/monitoring_v3/alert_policy_service.rst @@ -0,0 +1,10 @@ +AlertPolicyService +------------------------------------ + +.. automodule:: google.cloud.monitoring_v3.services.alert_policy_service + :members: + :inherited-members: + +.. automodule:: google.cloud.monitoring_v3.services.alert_policy_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/group_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/group_service.rst new file mode 100644 index 00000000..74703e5e --- /dev/null +++ b/owl-bot-staging/v3/docs/monitoring_v3/group_service.rst @@ -0,0 +1,10 @@ +GroupService +------------------------------ + +.. automodule:: google.cloud.monitoring_v3.services.group_service + :members: + :inherited-members: + +.. automodule:: google.cloud.monitoring_v3.services.group_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/metric_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/metric_service.rst new file mode 100644 index 00000000..74a1d3cf --- /dev/null +++ b/owl-bot-staging/v3/docs/monitoring_v3/metric_service.rst @@ -0,0 +1,10 @@ +MetricService +------------------------------- + +.. automodule:: google.cloud.monitoring_v3.services.metric_service + :members: + :inherited-members: + +.. automodule:: google.cloud.monitoring_v3.services.metric_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/notification_channel_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/notification_channel_service.rst new file mode 100644 index 00000000..85e28278 --- /dev/null +++ b/owl-bot-staging/v3/docs/monitoring_v3/notification_channel_service.rst @@ -0,0 +1,10 @@ +NotificationChannelService +-------------------------------------------- + +.. automodule:: google.cloud.monitoring_v3.services.notification_channel_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.monitoring_v3.services.notification_channel_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/query_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/query_service.rst new file mode 100644 index 00000000..b144dc56 --- /dev/null +++ b/owl-bot-staging/v3/docs/monitoring_v3/query_service.rst @@ -0,0 +1,10 @@ +QueryService +------------------------------ + +.. automodule:: google.cloud.monitoring_v3.services.query_service + :members: + :inherited-members: + +.. automodule:: google.cloud.monitoring_v3.services.query_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/service_monitoring_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/service_monitoring_service.rst new file mode 100644 index 00000000..f7c35fa6 --- /dev/null +++ b/owl-bot-staging/v3/docs/monitoring_v3/service_monitoring_service.rst @@ -0,0 +1,10 @@ +ServiceMonitoringService +------------------------------------------ + +.. automodule:: google.cloud.monitoring_v3.services.service_monitoring_service + :members: + :inherited-members: + +.. automodule:: google.cloud.monitoring_v3.services.service_monitoring_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/services.rst b/owl-bot-staging/v3/docs/monitoring_v3/services.rst new file mode 100644 index 00000000..18a1eb15 --- /dev/null +++ b/owl-bot-staging/v3/docs/monitoring_v3/services.rst @@ -0,0 +1,12 @@ +Services for Google Cloud Monitoring v3 API +=========================================== +.. toctree:: + :maxdepth: 2 + + alert_policy_service + group_service + metric_service + notification_channel_service + query_service + service_monitoring_service + uptime_check_service diff --git a/owl-bot-staging/v3/docs/monitoring_v3/types.rst b/owl-bot-staging/v3/docs/monitoring_v3/types.rst new file mode 100644 index 00000000..ed0eeeef --- /dev/null +++ b/owl-bot-staging/v3/docs/monitoring_v3/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Monitoring v3 API +======================================== + +.. automodule:: google.cloud.monitoring_v3.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/uptime_check_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/uptime_check_service.rst new file mode 100644 index 00000000..7149a771 --- /dev/null +++ b/owl-bot-staging/v3/docs/monitoring_v3/uptime_check_service.rst @@ -0,0 +1,10 @@ +UptimeCheckService +------------------------------------ + +.. automodule:: google.cloud.monitoring_v3.services.uptime_check_service + :members: + :inherited-members: + +.. automodule:: google.cloud.monitoring_v3.services.uptime_check_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v3/google/cloud/monitoring/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring/__init__.py new file mode 100644 index 00000000..3bc62397 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring/__init__.py @@ -0,0 +1,237 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.monitoring_v3.services.alert_policy_service.client import AlertPolicyServiceClient +from google.cloud.monitoring_v3.services.alert_policy_service.async_client import AlertPolicyServiceAsyncClient +from google.cloud.monitoring_v3.services.group_service.client import GroupServiceClient +from google.cloud.monitoring_v3.services.group_service.async_client import GroupServiceAsyncClient +from google.cloud.monitoring_v3.services.metric_service.client import MetricServiceClient +from google.cloud.monitoring_v3.services.metric_service.async_client import MetricServiceAsyncClient +from google.cloud.monitoring_v3.services.notification_channel_service.client import NotificationChannelServiceClient +from google.cloud.monitoring_v3.services.notification_channel_service.async_client import NotificationChannelServiceAsyncClient +from google.cloud.monitoring_v3.services.query_service.client import QueryServiceClient +from google.cloud.monitoring_v3.services.query_service.async_client import QueryServiceAsyncClient +from google.cloud.monitoring_v3.services.service_monitoring_service.client import ServiceMonitoringServiceClient +from google.cloud.monitoring_v3.services.service_monitoring_service.async_client import ServiceMonitoringServiceAsyncClient +from google.cloud.monitoring_v3.services.uptime_check_service.client import UptimeCheckServiceClient +from google.cloud.monitoring_v3.services.uptime_check_service.async_client import UptimeCheckServiceAsyncClient + +from google.cloud.monitoring_v3.types.alert import AlertPolicy +from google.cloud.monitoring_v3.types.alert_service import CreateAlertPolicyRequest +from google.cloud.monitoring_v3.types.alert_service import DeleteAlertPolicyRequest +from google.cloud.monitoring_v3.types.alert_service import GetAlertPolicyRequest +from google.cloud.monitoring_v3.types.alert_service import ListAlertPoliciesRequest +from google.cloud.monitoring_v3.types.alert_service import ListAlertPoliciesResponse +from google.cloud.monitoring_v3.types.alert_service import UpdateAlertPolicyRequest +from google.cloud.monitoring_v3.types.common import Aggregation +from google.cloud.monitoring_v3.types.common import TimeInterval +from google.cloud.monitoring_v3.types.common import TypedValue +from google.cloud.monitoring_v3.types.common import ComparisonType +from google.cloud.monitoring_v3.types.dropped_labels import DroppedLabels +from google.cloud.monitoring_v3.types.group import Group +from google.cloud.monitoring_v3.types.group_service import CreateGroupRequest +from google.cloud.monitoring_v3.types.group_service import DeleteGroupRequest +from google.cloud.monitoring_v3.types.group_service import GetGroupRequest +from google.cloud.monitoring_v3.types.group_service import ListGroupMembersRequest +from google.cloud.monitoring_v3.types.group_service import ListGroupMembersResponse +from google.cloud.monitoring_v3.types.group_service import ListGroupsRequest +from google.cloud.monitoring_v3.types.group_service import ListGroupsResponse +from google.cloud.monitoring_v3.types.group_service import UpdateGroupRequest +from google.cloud.monitoring_v3.types.metric import LabelValue +from google.cloud.monitoring_v3.types.metric import Point +from google.cloud.monitoring_v3.types.metric import QueryError +from google.cloud.monitoring_v3.types.metric import TextLocator +from google.cloud.monitoring_v3.types.metric import TimeSeries +from 
google.cloud.monitoring_v3.types.metric import TimeSeriesData +from google.cloud.monitoring_v3.types.metric import TimeSeriesDescriptor +from google.cloud.monitoring_v3.types.metric_service import CreateMetricDescriptorRequest +from google.cloud.monitoring_v3.types.metric_service import CreateTimeSeriesError +from google.cloud.monitoring_v3.types.metric_service import CreateTimeSeriesRequest +from google.cloud.monitoring_v3.types.metric_service import CreateTimeSeriesSummary +from google.cloud.monitoring_v3.types.metric_service import DeleteMetricDescriptorRequest +from google.cloud.monitoring_v3.types.metric_service import GetMetricDescriptorRequest +from google.cloud.monitoring_v3.types.metric_service import GetMonitoredResourceDescriptorRequest +from google.cloud.monitoring_v3.types.metric_service import ListMetricDescriptorsRequest +from google.cloud.monitoring_v3.types.metric_service import ListMetricDescriptorsResponse +from google.cloud.monitoring_v3.types.metric_service import ListMonitoredResourceDescriptorsRequest +from google.cloud.monitoring_v3.types.metric_service import ListMonitoredResourceDescriptorsResponse +from google.cloud.monitoring_v3.types.metric_service import ListTimeSeriesRequest +from google.cloud.monitoring_v3.types.metric_service import ListTimeSeriesResponse +from google.cloud.monitoring_v3.types.metric_service import QueryErrorList +from google.cloud.monitoring_v3.types.metric_service import QueryTimeSeriesRequest +from google.cloud.monitoring_v3.types.metric_service import QueryTimeSeriesResponse +from google.cloud.monitoring_v3.types.mutation_record import MutationRecord +from google.cloud.monitoring_v3.types.notification import NotificationChannel +from google.cloud.monitoring_v3.types.notification import NotificationChannelDescriptor +from google.cloud.monitoring_v3.types.notification_service import CreateNotificationChannelRequest +from google.cloud.monitoring_v3.types.notification_service import DeleteNotificationChannelRequest +from google.cloud.monitoring_v3.types.notification_service import GetNotificationChannelDescriptorRequest +from google.cloud.monitoring_v3.types.notification_service import GetNotificationChannelRequest +from google.cloud.monitoring_v3.types.notification_service import GetNotificationChannelVerificationCodeRequest +from google.cloud.monitoring_v3.types.notification_service import GetNotificationChannelVerificationCodeResponse +from google.cloud.monitoring_v3.types.notification_service import ListNotificationChannelDescriptorsRequest +from google.cloud.monitoring_v3.types.notification_service import ListNotificationChannelDescriptorsResponse +from google.cloud.monitoring_v3.types.notification_service import ListNotificationChannelsRequest +from google.cloud.monitoring_v3.types.notification_service import ListNotificationChannelsResponse +from google.cloud.monitoring_v3.types.notification_service import SendNotificationChannelVerificationCodeRequest +from google.cloud.monitoring_v3.types.notification_service import UpdateNotificationChannelRequest +from google.cloud.monitoring_v3.types.notification_service import VerifyNotificationChannelRequest +from google.cloud.monitoring_v3.types.service import BasicSli +from google.cloud.monitoring_v3.types.service import DistributionCut +from google.cloud.monitoring_v3.types.service import Range +from google.cloud.monitoring_v3.types.service import RequestBasedSli +from google.cloud.monitoring_v3.types.service import Service +from google.cloud.monitoring_v3.types.service import 
ServiceLevelIndicator +from google.cloud.monitoring_v3.types.service import ServiceLevelObjective +from google.cloud.monitoring_v3.types.service import TimeSeriesRatio +from google.cloud.monitoring_v3.types.service import WindowsBasedSli +from google.cloud.monitoring_v3.types.service_service import CreateServiceLevelObjectiveRequest +from google.cloud.monitoring_v3.types.service_service import CreateServiceRequest +from google.cloud.monitoring_v3.types.service_service import DeleteServiceLevelObjectiveRequest +from google.cloud.monitoring_v3.types.service_service import DeleteServiceRequest +from google.cloud.monitoring_v3.types.service_service import GetServiceLevelObjectiveRequest +from google.cloud.monitoring_v3.types.service_service import GetServiceRequest +from google.cloud.monitoring_v3.types.service_service import ListServiceLevelObjectivesRequest +from google.cloud.monitoring_v3.types.service_service import ListServiceLevelObjectivesResponse +from google.cloud.monitoring_v3.types.service_service import ListServicesRequest +from google.cloud.monitoring_v3.types.service_service import ListServicesResponse +from google.cloud.monitoring_v3.types.service_service import UpdateServiceLevelObjectiveRequest +from google.cloud.monitoring_v3.types.service_service import UpdateServiceRequest +from google.cloud.monitoring_v3.types.span_context import SpanContext +from google.cloud.monitoring_v3.types.uptime import InternalChecker +from google.cloud.monitoring_v3.types.uptime import UptimeCheckConfig +from google.cloud.monitoring_v3.types.uptime import UptimeCheckIp +from google.cloud.monitoring_v3.types.uptime import GroupResourceType +from google.cloud.monitoring_v3.types.uptime import UptimeCheckRegion +from google.cloud.monitoring_v3.types.uptime_service import CreateUptimeCheckConfigRequest +from google.cloud.monitoring_v3.types.uptime_service import DeleteUptimeCheckConfigRequest +from google.cloud.monitoring_v3.types.uptime_service import GetUptimeCheckConfigRequest +from google.cloud.monitoring_v3.types.uptime_service import ListUptimeCheckConfigsRequest +from google.cloud.monitoring_v3.types.uptime_service import ListUptimeCheckConfigsResponse +from google.cloud.monitoring_v3.types.uptime_service import ListUptimeCheckIpsRequest +from google.cloud.monitoring_v3.types.uptime_service import ListUptimeCheckIpsResponse +from google.cloud.monitoring_v3.types.uptime_service import UpdateUptimeCheckConfigRequest + +__all__ = ('AlertPolicyServiceClient', + 'AlertPolicyServiceAsyncClient', + 'GroupServiceClient', + 'GroupServiceAsyncClient', + 'MetricServiceClient', + 'MetricServiceAsyncClient', + 'NotificationChannelServiceClient', + 'NotificationChannelServiceAsyncClient', + 'QueryServiceClient', + 'QueryServiceAsyncClient', + 'ServiceMonitoringServiceClient', + 'ServiceMonitoringServiceAsyncClient', + 'UptimeCheckServiceClient', + 'UptimeCheckServiceAsyncClient', + 'AlertPolicy', + 'CreateAlertPolicyRequest', + 'DeleteAlertPolicyRequest', + 'GetAlertPolicyRequest', + 'ListAlertPoliciesRequest', + 'ListAlertPoliciesResponse', + 'UpdateAlertPolicyRequest', + 'Aggregation', + 'TimeInterval', + 'TypedValue', + 'ComparisonType', + 'DroppedLabels', + 'Group', + 'CreateGroupRequest', + 'DeleteGroupRequest', + 'GetGroupRequest', + 'ListGroupMembersRequest', + 'ListGroupMembersResponse', + 'ListGroupsRequest', + 'ListGroupsResponse', + 'UpdateGroupRequest', + 'LabelValue', + 'Point', + 'QueryError', + 'TextLocator', + 'TimeSeries', + 'TimeSeriesData', + 'TimeSeriesDescriptor', + 
'CreateMetricDescriptorRequest', + 'CreateTimeSeriesError', + 'CreateTimeSeriesRequest', + 'CreateTimeSeriesSummary', + 'DeleteMetricDescriptorRequest', + 'GetMetricDescriptorRequest', + 'GetMonitoredResourceDescriptorRequest', + 'ListMetricDescriptorsRequest', + 'ListMetricDescriptorsResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'ListTimeSeriesRequest', + 'ListTimeSeriesResponse', + 'QueryErrorList', + 'QueryTimeSeriesRequest', + 'QueryTimeSeriesResponse', + 'MutationRecord', + 'NotificationChannel', + 'NotificationChannelDescriptor', + 'CreateNotificationChannelRequest', + 'DeleteNotificationChannelRequest', + 'GetNotificationChannelDescriptorRequest', + 'GetNotificationChannelRequest', + 'GetNotificationChannelVerificationCodeRequest', + 'GetNotificationChannelVerificationCodeResponse', + 'ListNotificationChannelDescriptorsRequest', + 'ListNotificationChannelDescriptorsResponse', + 'ListNotificationChannelsRequest', + 'ListNotificationChannelsResponse', + 'SendNotificationChannelVerificationCodeRequest', + 'UpdateNotificationChannelRequest', + 'VerifyNotificationChannelRequest', + 'BasicSli', + 'DistributionCut', + 'Range', + 'RequestBasedSli', + 'Service', + 'ServiceLevelIndicator', + 'ServiceLevelObjective', + 'TimeSeriesRatio', + 'WindowsBasedSli', + 'CreateServiceLevelObjectiveRequest', + 'CreateServiceRequest', + 'DeleteServiceLevelObjectiveRequest', + 'DeleteServiceRequest', + 'GetServiceLevelObjectiveRequest', + 'GetServiceRequest', + 'ListServiceLevelObjectivesRequest', + 'ListServiceLevelObjectivesResponse', + 'ListServicesRequest', + 'ListServicesResponse', + 'UpdateServiceLevelObjectiveRequest', + 'UpdateServiceRequest', + 'SpanContext', + 'InternalChecker', + 'UptimeCheckConfig', + 'UptimeCheckIp', + 'GroupResourceType', + 'UptimeCheckRegion', + 'CreateUptimeCheckConfigRequest', + 'DeleteUptimeCheckConfigRequest', + 'GetUptimeCheckConfigRequest', + 'ListUptimeCheckConfigsRequest', + 'ListUptimeCheckConfigsResponse', + 'ListUptimeCheckIpsRequest', + 'ListUptimeCheckIpsResponse', + 'UpdateUptimeCheckConfigRequest', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring/py.typed b/owl-bot-staging/v3/google/cloud/monitoring/py.typed new file mode 100644 index 00000000..55d895b0 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-monitoring package uses inline types. diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/__init__.py new file mode 100644 index 00000000..95f197d4 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/__init__.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.alert_policy_service import AlertPolicyServiceClient +from .services.alert_policy_service import AlertPolicyServiceAsyncClient +from .services.group_service import GroupServiceClient +from .services.group_service import GroupServiceAsyncClient +from .services.metric_service import MetricServiceClient +from .services.metric_service import MetricServiceAsyncClient +from .services.notification_channel_service import NotificationChannelServiceClient +from .services.notification_channel_service import NotificationChannelServiceAsyncClient +from .services.query_service import QueryServiceClient +from .services.query_service import QueryServiceAsyncClient +from .services.service_monitoring_service import ServiceMonitoringServiceClient +from .services.service_monitoring_service import ServiceMonitoringServiceAsyncClient +from .services.uptime_check_service import UptimeCheckServiceClient +from .services.uptime_check_service import UptimeCheckServiceAsyncClient + +from .types.alert import AlertPolicy +from .types.alert_service import CreateAlertPolicyRequest +from .types.alert_service import DeleteAlertPolicyRequest +from .types.alert_service import GetAlertPolicyRequest +from .types.alert_service import ListAlertPoliciesRequest +from .types.alert_service import ListAlertPoliciesResponse +from .types.alert_service import UpdateAlertPolicyRequest +from .types.common import Aggregation +from .types.common import TimeInterval +from .types.common import TypedValue +from .types.common import ComparisonType +from .types.dropped_labels import DroppedLabels +from .types.group import Group +from .types.group_service import CreateGroupRequest +from .types.group_service import DeleteGroupRequest +from .types.group_service import GetGroupRequest +from .types.group_service import ListGroupMembersRequest +from .types.group_service import ListGroupMembersResponse +from .types.group_service import ListGroupsRequest +from .types.group_service import ListGroupsResponse +from .types.group_service import UpdateGroupRequest +from .types.metric import LabelValue +from .types.metric import Point +from .types.metric import QueryError +from .types.metric import TextLocator +from .types.metric import TimeSeries +from .types.metric import TimeSeriesData +from .types.metric import TimeSeriesDescriptor +from .types.metric_service import CreateMetricDescriptorRequest +from .types.metric_service import CreateTimeSeriesError +from .types.metric_service import CreateTimeSeriesRequest +from .types.metric_service import CreateTimeSeriesSummary +from .types.metric_service import DeleteMetricDescriptorRequest +from .types.metric_service import GetMetricDescriptorRequest +from .types.metric_service import GetMonitoredResourceDescriptorRequest +from .types.metric_service import ListMetricDescriptorsRequest +from .types.metric_service import ListMetricDescriptorsResponse +from .types.metric_service import ListMonitoredResourceDescriptorsRequest +from .types.metric_service import ListMonitoredResourceDescriptorsResponse +from .types.metric_service import ListTimeSeriesRequest +from .types.metric_service import ListTimeSeriesResponse +from .types.metric_service import QueryErrorList +from .types.metric_service import QueryTimeSeriesRequest +from .types.metric_service import QueryTimeSeriesResponse +from .types.mutation_record import MutationRecord +from .types.notification import NotificationChannel +from .types.notification import NotificationChannelDescriptor +from .types.notification_service import 
CreateNotificationChannelRequest +from .types.notification_service import DeleteNotificationChannelRequest +from .types.notification_service import GetNotificationChannelDescriptorRequest +from .types.notification_service import GetNotificationChannelRequest +from .types.notification_service import GetNotificationChannelVerificationCodeRequest +from .types.notification_service import GetNotificationChannelVerificationCodeResponse +from .types.notification_service import ListNotificationChannelDescriptorsRequest +from .types.notification_service import ListNotificationChannelDescriptorsResponse +from .types.notification_service import ListNotificationChannelsRequest +from .types.notification_service import ListNotificationChannelsResponse +from .types.notification_service import SendNotificationChannelVerificationCodeRequest +from .types.notification_service import UpdateNotificationChannelRequest +from .types.notification_service import VerifyNotificationChannelRequest +from .types.service import BasicSli +from .types.service import DistributionCut +from .types.service import Range +from .types.service import RequestBasedSli +from .types.service import Service +from .types.service import ServiceLevelIndicator +from .types.service import ServiceLevelObjective +from .types.service import TimeSeriesRatio +from .types.service import WindowsBasedSli +from .types.service_service import CreateServiceLevelObjectiveRequest +from .types.service_service import CreateServiceRequest +from .types.service_service import DeleteServiceLevelObjectiveRequest +from .types.service_service import DeleteServiceRequest +from .types.service_service import GetServiceLevelObjectiveRequest +from .types.service_service import GetServiceRequest +from .types.service_service import ListServiceLevelObjectivesRequest +from .types.service_service import ListServiceLevelObjectivesResponse +from .types.service_service import ListServicesRequest +from .types.service_service import ListServicesResponse +from .types.service_service import UpdateServiceLevelObjectiveRequest +from .types.service_service import UpdateServiceRequest +from .types.span_context import SpanContext +from .types.uptime import InternalChecker +from .types.uptime import UptimeCheckConfig +from .types.uptime import UptimeCheckIp +from .types.uptime import GroupResourceType +from .types.uptime import UptimeCheckRegion +from .types.uptime_service import CreateUptimeCheckConfigRequest +from .types.uptime_service import DeleteUptimeCheckConfigRequest +from .types.uptime_service import GetUptimeCheckConfigRequest +from .types.uptime_service import ListUptimeCheckConfigsRequest +from .types.uptime_service import ListUptimeCheckConfigsResponse +from .types.uptime_service import ListUptimeCheckIpsRequest +from .types.uptime_service import ListUptimeCheckIpsResponse +from .types.uptime_service import UpdateUptimeCheckConfigRequest + +__all__ = ( + 'AlertPolicyServiceAsyncClient', + 'GroupServiceAsyncClient', + 'MetricServiceAsyncClient', + 'NotificationChannelServiceAsyncClient', + 'QueryServiceAsyncClient', + 'ServiceMonitoringServiceAsyncClient', + 'UptimeCheckServiceAsyncClient', +'Aggregation', +'AlertPolicy', +'AlertPolicyServiceClient', +'BasicSli', +'ComparisonType', +'CreateAlertPolicyRequest', +'CreateGroupRequest', +'CreateMetricDescriptorRequest', +'CreateNotificationChannelRequest', +'CreateServiceLevelObjectiveRequest', +'CreateServiceRequest', +'CreateTimeSeriesError', +'CreateTimeSeriesRequest', +'CreateTimeSeriesSummary', 
+'CreateUptimeCheckConfigRequest', +'DeleteAlertPolicyRequest', +'DeleteGroupRequest', +'DeleteMetricDescriptorRequest', +'DeleteNotificationChannelRequest', +'DeleteServiceLevelObjectiveRequest', +'DeleteServiceRequest', +'DeleteUptimeCheckConfigRequest', +'DistributionCut', +'DroppedLabels', +'GetAlertPolicyRequest', +'GetGroupRequest', +'GetMetricDescriptorRequest', +'GetMonitoredResourceDescriptorRequest', +'GetNotificationChannelDescriptorRequest', +'GetNotificationChannelRequest', +'GetNotificationChannelVerificationCodeRequest', +'GetNotificationChannelVerificationCodeResponse', +'GetServiceLevelObjectiveRequest', +'GetServiceRequest', +'GetUptimeCheckConfigRequest', +'Group', +'GroupResourceType', +'GroupServiceClient', +'InternalChecker', +'LabelValue', +'ListAlertPoliciesRequest', +'ListAlertPoliciesResponse', +'ListGroupMembersRequest', +'ListGroupMembersResponse', +'ListGroupsRequest', +'ListGroupsResponse', +'ListMetricDescriptorsRequest', +'ListMetricDescriptorsResponse', +'ListMonitoredResourceDescriptorsRequest', +'ListMonitoredResourceDescriptorsResponse', +'ListNotificationChannelDescriptorsRequest', +'ListNotificationChannelDescriptorsResponse', +'ListNotificationChannelsRequest', +'ListNotificationChannelsResponse', +'ListServiceLevelObjectivesRequest', +'ListServiceLevelObjectivesResponse', +'ListServicesRequest', +'ListServicesResponse', +'ListTimeSeriesRequest', +'ListTimeSeriesResponse', +'ListUptimeCheckConfigsRequest', +'ListUptimeCheckConfigsResponse', +'ListUptimeCheckIpsRequest', +'ListUptimeCheckIpsResponse', +'MetricServiceClient', +'MutationRecord', +'NotificationChannel', +'NotificationChannelDescriptor', +'NotificationChannelServiceClient', +'Point', +'QueryError', +'QueryErrorList', +'QueryServiceClient', +'QueryTimeSeriesRequest', +'QueryTimeSeriesResponse', +'Range', +'RequestBasedSli', +'SendNotificationChannelVerificationCodeRequest', +'Service', +'ServiceLevelIndicator', +'ServiceLevelObjective', +'ServiceMonitoringServiceClient', +'SpanContext', +'TextLocator', +'TimeInterval', +'TimeSeries', +'TimeSeriesData', +'TimeSeriesDescriptor', +'TimeSeriesRatio', +'TypedValue', +'UpdateAlertPolicyRequest', +'UpdateGroupRequest', +'UpdateNotificationChannelRequest', +'UpdateServiceLevelObjectiveRequest', +'UpdateServiceRequest', +'UpdateUptimeCheckConfigRequest', +'UptimeCheckConfig', +'UptimeCheckIp', +'UptimeCheckRegion', +'UptimeCheckServiceClient', +'VerifyNotificationChannelRequest', +'WindowsBasedSli', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/gapic_metadata.json b/owl-bot-staging/v3/google/cloud/monitoring_v3/gapic_metadata.json new file mode 100644 index 00000000..0b3e214a --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/gapic_metadata.json @@ -0,0 +1,567 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.monitoring_v3", + "protoPackage": "google.monitoring.v3", + "schema": "1.0", + "services": { + "AlertPolicyService": { + "clients": { + "grpc": { + "libraryClient": "AlertPolicyServiceClient", + "rpcs": { + "CreateAlertPolicy": { + "methods": [ + "create_alert_policy" + ] + }, + "DeleteAlertPolicy": { + "methods": [ + "delete_alert_policy" + ] + }, + "GetAlertPolicy": { + "methods": [ + "get_alert_policy" + ] + }, + "ListAlertPolicies": { + "methods": [ + "list_alert_policies" + ] + }, + "UpdateAlertPolicy": { + "methods": [ + "update_alert_policy" + ] + } + } + }, + "grpc-async": { + "libraryClient": 
"AlertPolicyServiceAsyncClient", + "rpcs": { + "CreateAlertPolicy": { + "methods": [ + "create_alert_policy" + ] + }, + "DeleteAlertPolicy": { + "methods": [ + "delete_alert_policy" + ] + }, + "GetAlertPolicy": { + "methods": [ + "get_alert_policy" + ] + }, + "ListAlertPolicies": { + "methods": [ + "list_alert_policies" + ] + }, + "UpdateAlertPolicy": { + "methods": [ + "update_alert_policy" + ] + } + } + } + } + }, + "GroupService": { + "clients": { + "grpc": { + "libraryClient": "GroupServiceClient", + "rpcs": { + "CreateGroup": { + "methods": [ + "create_group" + ] + }, + "DeleteGroup": { + "methods": [ + "delete_group" + ] + }, + "GetGroup": { + "methods": [ + "get_group" + ] + }, + "ListGroupMembers": { + "methods": [ + "list_group_members" + ] + }, + "ListGroups": { + "methods": [ + "list_groups" + ] + }, + "UpdateGroup": { + "methods": [ + "update_group" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GroupServiceAsyncClient", + "rpcs": { + "CreateGroup": { + "methods": [ + "create_group" + ] + }, + "DeleteGroup": { + "methods": [ + "delete_group" + ] + }, + "GetGroup": { + "methods": [ + "get_group" + ] + }, + "ListGroupMembers": { + "methods": [ + "list_group_members" + ] + }, + "ListGroups": { + "methods": [ + "list_groups" + ] + }, + "UpdateGroup": { + "methods": [ + "update_group" + ] + } + } + } + } + }, + "MetricService": { + "clients": { + "grpc": { + "libraryClient": "MetricServiceClient", + "rpcs": { + "CreateMetricDescriptor": { + "methods": [ + "create_metric_descriptor" + ] + }, + "CreateTimeSeries": { + "methods": [ + "create_time_series" + ] + }, + "DeleteMetricDescriptor": { + "methods": [ + "delete_metric_descriptor" + ] + }, + "GetMetricDescriptor": { + "methods": [ + "get_metric_descriptor" + ] + }, + "GetMonitoredResourceDescriptor": { + "methods": [ + "get_monitored_resource_descriptor" + ] + }, + "ListMetricDescriptors": { + "methods": [ + "list_metric_descriptors" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "ListTimeSeries": { + "methods": [ + "list_time_series" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetricServiceAsyncClient", + "rpcs": { + "CreateMetricDescriptor": { + "methods": [ + "create_metric_descriptor" + ] + }, + "CreateTimeSeries": { + "methods": [ + "create_time_series" + ] + }, + "DeleteMetricDescriptor": { + "methods": [ + "delete_metric_descriptor" + ] + }, + "GetMetricDescriptor": { + "methods": [ + "get_metric_descriptor" + ] + }, + "GetMonitoredResourceDescriptor": { + "methods": [ + "get_monitored_resource_descriptor" + ] + }, + "ListMetricDescriptors": { + "methods": [ + "list_metric_descriptors" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "ListTimeSeries": { + "methods": [ + "list_time_series" + ] + } + } + } + } + }, + "NotificationChannelService": { + "clients": { + "grpc": { + "libraryClient": "NotificationChannelServiceClient", + "rpcs": { + "CreateNotificationChannel": { + "methods": [ + "create_notification_channel" + ] + }, + "DeleteNotificationChannel": { + "methods": [ + "delete_notification_channel" + ] + }, + "GetNotificationChannel": { + "methods": [ + "get_notification_channel" + ] + }, + "GetNotificationChannelDescriptor": { + "methods": [ + "get_notification_channel_descriptor" + ] + }, + "GetNotificationChannelVerificationCode": { + "methods": [ + "get_notification_channel_verification_code" + ] + }, + "ListNotificationChannelDescriptors": { + "methods": [ + 
"list_notification_channel_descriptors" + ] + }, + "ListNotificationChannels": { + "methods": [ + "list_notification_channels" + ] + }, + "SendNotificationChannelVerificationCode": { + "methods": [ + "send_notification_channel_verification_code" + ] + }, + "UpdateNotificationChannel": { + "methods": [ + "update_notification_channel" + ] + }, + "VerifyNotificationChannel": { + "methods": [ + "verify_notification_channel" + ] + } + } + }, + "grpc-async": { + "libraryClient": "NotificationChannelServiceAsyncClient", + "rpcs": { + "CreateNotificationChannel": { + "methods": [ + "create_notification_channel" + ] + }, + "DeleteNotificationChannel": { + "methods": [ + "delete_notification_channel" + ] + }, + "GetNotificationChannel": { + "methods": [ + "get_notification_channel" + ] + }, + "GetNotificationChannelDescriptor": { + "methods": [ + "get_notification_channel_descriptor" + ] + }, + "GetNotificationChannelVerificationCode": { + "methods": [ + "get_notification_channel_verification_code" + ] + }, + "ListNotificationChannelDescriptors": { + "methods": [ + "list_notification_channel_descriptors" + ] + }, + "ListNotificationChannels": { + "methods": [ + "list_notification_channels" + ] + }, + "SendNotificationChannelVerificationCode": { + "methods": [ + "send_notification_channel_verification_code" + ] + }, + "UpdateNotificationChannel": { + "methods": [ + "update_notification_channel" + ] + }, + "VerifyNotificationChannel": { + "methods": [ + "verify_notification_channel" + ] + } + } + } + } + }, + "QueryService": { + "clients": { + "grpc": { + "libraryClient": "QueryServiceClient", + "rpcs": { + "QueryTimeSeries": { + "methods": [ + "query_time_series" + ] + } + } + }, + "grpc-async": { + "libraryClient": "QueryServiceAsyncClient", + "rpcs": { + "QueryTimeSeries": { + "methods": [ + "query_time_series" + ] + } + } + } + } + }, + "ServiceMonitoringService": { + "clients": { + "grpc": { + "libraryClient": "ServiceMonitoringServiceClient", + "rpcs": { + "CreateService": { + "methods": [ + "create_service" + ] + }, + "CreateServiceLevelObjective": { + "methods": [ + "create_service_level_objective" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "DeleteServiceLevelObjective": { + "methods": [ + "delete_service_level_objective" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "GetServiceLevelObjective": { + "methods": [ + "get_service_level_objective" + ] + }, + "ListServiceLevelObjectives": { + "methods": [ + "list_service_level_objectives" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "UpdateService": { + "methods": [ + "update_service" + ] + }, + "UpdateServiceLevelObjective": { + "methods": [ + "update_service_level_objective" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ServiceMonitoringServiceAsyncClient", + "rpcs": { + "CreateService": { + "methods": [ + "create_service" + ] + }, + "CreateServiceLevelObjective": { + "methods": [ + "create_service_level_objective" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "DeleteServiceLevelObjective": { + "methods": [ + "delete_service_level_objective" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "GetServiceLevelObjective": { + "methods": [ + "get_service_level_objective" + ] + }, + "ListServiceLevelObjectives": { + "methods": [ + "list_service_level_objectives" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "UpdateService": { + "methods": [ + "update_service" + ] + }, + 
"UpdateServiceLevelObjective": { + "methods": [ + "update_service_level_objective" + ] + } + } + } + } + }, + "UptimeCheckService": { + "clients": { + "grpc": { + "libraryClient": "UptimeCheckServiceClient", + "rpcs": { + "CreateUptimeCheckConfig": { + "methods": [ + "create_uptime_check_config" + ] + }, + "DeleteUptimeCheckConfig": { + "methods": [ + "delete_uptime_check_config" + ] + }, + "GetUptimeCheckConfig": { + "methods": [ + "get_uptime_check_config" + ] + }, + "ListUptimeCheckConfigs": { + "methods": [ + "list_uptime_check_configs" + ] + }, + "ListUptimeCheckIps": { + "methods": [ + "list_uptime_check_ips" + ] + }, + "UpdateUptimeCheckConfig": { + "methods": [ + "update_uptime_check_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "UptimeCheckServiceAsyncClient", + "rpcs": { + "CreateUptimeCheckConfig": { + "methods": [ + "create_uptime_check_config" + ] + }, + "DeleteUptimeCheckConfig": { + "methods": [ + "delete_uptime_check_config" + ] + }, + "GetUptimeCheckConfig": { + "methods": [ + "get_uptime_check_config" + ] + }, + "ListUptimeCheckConfigs": { + "methods": [ + "list_uptime_check_configs" + ] + }, + "ListUptimeCheckIps": { + "methods": [ + "list_uptime_check_ips" + ] + }, + "UpdateUptimeCheckConfig": { + "methods": [ + "update_uptime_check_config" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/py.typed b/owl-bot-staging/v3/google/cloud/monitoring_v3/py.typed new file mode 100644 index 00000000..55d895b0 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-monitoring package uses inline types. diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/__init__.py new file mode 100644 index 00000000..a66d4d58 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import AlertPolicyServiceClient +from .async_client import AlertPolicyServiceAsyncClient + +__all__ = ( + 'AlertPolicyServiceClient', + 'AlertPolicyServiceAsyncClient', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py new file mode 100644 index 00000000..7e36ea78 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py @@ -0,0 +1,673 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.services.alert_policy_service import pagers +from google.cloud.monitoring_v3.types import alert +from google.cloud.monitoring_v3.types import alert_service +from google.cloud.monitoring_v3.types import mutation_record +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import AlertPolicyServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import AlertPolicyServiceGrpcAsyncIOTransport +from .client import AlertPolicyServiceClient + + +class AlertPolicyServiceAsyncClient: + """The AlertPolicyService API is used to manage (list, create, delete, + edit) alert policies in Stackdriver Monitoring. An alerting policy + is a description of the conditions under which some aspect of your + system is considered to be "unhealthy" and the ways to notify people + or services about this state. In addition to using this API, alert + policies can also be managed through `Stackdriver + Monitoring `__, which can + be reached by clicking the "Monitoring" tab in `Cloud + Console `__. 
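# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the generated patch: one way the
# async client documented above could be driven.  "my-project" is a
# hypothetical project ID.
# ---------------------------------------------------------------------------
import asyncio

from google.cloud import monitoring_v3


async def show_alert_policies() -> None:
    client = monitoring_v3.AlertPolicyServiceAsyncClient()
    # The returned async pager resolves additional pages during iteration.
    pager = await client.list_alert_policies(name="projects/my-project")
    async for policy in pager:
        print(policy.name, policy.display_name)


if __name__ == "__main__":
    asyncio.run(show_alert_policies())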
+ """ + + _client: AlertPolicyServiceClient + + DEFAULT_ENDPOINT = AlertPolicyServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AlertPolicyServiceClient.DEFAULT_MTLS_ENDPOINT + + alert_policy_path = staticmethod(AlertPolicyServiceClient.alert_policy_path) + parse_alert_policy_path = staticmethod(AlertPolicyServiceClient.parse_alert_policy_path) + alert_policy_condition_path = staticmethod(AlertPolicyServiceClient.alert_policy_condition_path) + parse_alert_policy_condition_path = staticmethod(AlertPolicyServiceClient.parse_alert_policy_condition_path) + common_billing_account_path = staticmethod(AlertPolicyServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(AlertPolicyServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(AlertPolicyServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(AlertPolicyServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(AlertPolicyServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(AlertPolicyServiceClient.parse_common_organization_path) + common_project_path = staticmethod(AlertPolicyServiceClient.common_project_path) + parse_common_project_path = staticmethod(AlertPolicyServiceClient.parse_common_project_path) + common_location_path = staticmethod(AlertPolicyServiceClient.common_location_path) + parse_common_location_path = staticmethod(AlertPolicyServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlertPolicyServiceAsyncClient: The constructed client. + """ + return AlertPolicyServiceClient.from_service_account_info.__func__(AlertPolicyServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlertPolicyServiceAsyncClient: The constructed client. + """ + return AlertPolicyServiceClient.from_service_account_file.__func__(AlertPolicyServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AlertPolicyServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AlertPolicyServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(AlertPolicyServiceClient).get_transport_class, type(AlertPolicyServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, AlertPolicyServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the alert policy service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AlertPolicyServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AlertPolicyServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_alert_policies(self, + request: alert_service.ListAlertPoliciesRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAlertPoliciesAsyncPager: + r"""Lists the existing alerting policies for the + workspace. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListAlertPoliciesRequest`): + The request object. The protocol for the + `ListAlertPolicies` request. + name (:class:`str`): + Required. The + `project `__ + whose alert policies are to be listed. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + Note that this field names the parent container in which + the alerting policies to be listed are stored. To + retrieve a single alerting policy by name, use the + [GetAlertPolicy][google.monitoring.v3.AlertPolicyService.GetAlertPolicy] + operation, instead. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.alert_policy_service.pagers.ListAlertPoliciesAsyncPager: + The protocol for the ListAlertPolicies response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = alert_service.ListAlertPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_alert_policies, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAlertPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_alert_policy(self, + request: alert_service.GetAlertPolicyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> alert.AlertPolicy: + r"""Gets a single alerting policy. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetAlertPolicyRequest`): + The request object. The protocol for the + `GetAlertPolicy` request. + name (:class:`str`): + Required. The alerting policy to retrieve. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.AlertPolicy: + A description of the conditions under which some aspect of your system is + considered to be "unhealthy" and the ways to notify + people or services about this state. For an overview + of alert policies, see [Introduction to + Alerting](\ https://cloud.google.com/monitoring/alerts/). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = alert_service.GetAlertPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
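# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the generated patch: the generated methods
# wire in a default retry (0.1 s initial backoff, 1.3 multiplier, 30 s cap,
# retrying ServiceUnavailable), and a caller can override it per call through
# the ``retry`` and ``timeout`` keyword arguments.  The numeric values and the
# policy name below are arbitrary placeholders; the same arguments exist on
# the async client.
# ---------------------------------------------------------------------------
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import monitoring_v3

client = monitoring_v3.AlertPolicyServiceClient()
custom_retry = retries.Retry(
    initial=0.5,       # seconds before the first retry
    maximum=10.0,      # ceiling for exponential backoff
    multiplier=2.0,
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    deadline=60.0,     # overall deadline for all attempts
)
policy = client.get_alert_policy(
    name="projects/my-project/alertPolicies/1234",
    retry=custom_retry,
    timeout=60.0,
)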
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_alert_policy, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_alert_policy(self, + request: alert_service.CreateAlertPolicyRequest = None, + *, + name: str = None, + alert_policy: alert.AlertPolicy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> alert.AlertPolicy: + r"""Creates a new alerting policy. + + Args: + request (:class:`google.cloud.monitoring_v3.types.CreateAlertPolicyRequest`): + The request object. The protocol for the + `CreateAlertPolicy` request. + name (:class:`str`): + Required. The + `project `__ + in which to create the alerting policy. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + Note that this field names the parent container in which + the alerting policy will be written, not the name of the + created policy. \|name\| must be a host project of a + workspace, otherwise INVALID_ARGUMENT error will return. + The alerting policy that is returned will have a name + that contains a normalized representation of this name + as a prefix but adds a suffix of the form + ``/alertPolicies/[ALERT_POLICY_ID]``, identifying the + policy in the container. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + alert_policy (:class:`google.cloud.monitoring_v3.types.AlertPolicy`): + Required. The requested alerting policy. You should omit + the ``name`` field in this policy. The name will be + returned in the new policy, including a new + ``[ALERT_POLICY_ID]`` value. + + This corresponds to the ``alert_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.AlertPolicy: + A description of the conditions under which some aspect of your system is + considered to be "unhealthy" and the ways to notify + people or services about this state. For an overview + of alert policies, see [Introduction to + Alerting](\ https://cloud.google.com/monitoring/alerts/). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, alert_policy]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = alert_service.CreateAlertPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
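# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the generated patch: building a minimal
# metric-threshold policy for the create call documented above.  The project
# ID, display names, filter, threshold, and alignment period are hypothetical
# placeholders.
# ---------------------------------------------------------------------------
from google.cloud import monitoring_v3
from google.protobuf import duration_pb2

client = monitoring_v3.AlertPolicyServiceClient()
policy = monitoring_v3.AlertPolicy(
    display_name="High CPU (example)",
    combiner=monitoring_v3.AlertPolicy.ConditionCombinerType.OR,
    conditions=[
        monitoring_v3.AlertPolicy.Condition(
            display_name="CPU utilization above 80% for 5 minutes",
            condition_threshold=monitoring_v3.AlertPolicy.Condition.MetricThreshold(
                filter='metric.type = "compute.googleapis.com/instance/cpu/utilization"',
                comparison=monitoring_v3.ComparisonType.COMPARISON_GT,
                threshold_value=0.8,
                duration=duration_pb2.Duration(seconds=300),
                aggregations=[
                    monitoring_v3.Aggregation(
                        alignment_period=duration_pb2.Duration(seconds=300),
                        per_series_aligner=monitoring_v3.Aggregation.Aligner.ALIGN_MEAN,
                    )
                ],
            ),
        )
    ],
)
created = client.create_alert_policy(name="projects/my-project", alert_policy=policy)
# The returned name carries the server-assigned [ALERT_POLICY_ID] suffix.
print(created.name)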
+ if name is not None: + request.name = name + if alert_policy is not None: + request.alert_policy = alert_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_alert_policy, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_alert_policy(self, + request: alert_service.DeleteAlertPolicyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an alerting policy. + + Args: + request (:class:`google.cloud.monitoring_v3.types.DeleteAlertPolicyRequest`): + The request object. The protocol for the + `DeleteAlertPolicy` request. + name (:class:`str`): + Required. The alerting policy to delete. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] + + For more information, see + [AlertPolicy][google.monitoring.v3.AlertPolicy]. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = alert_service.DeleteAlertPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_alert_policy, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_alert_policy(self, + request: alert_service.UpdateAlertPolicyRequest = None, + *, + update_mask: field_mask_pb2.FieldMask = None, + alert_policy: alert.AlertPolicy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> alert.AlertPolicy: + r"""Updates an alerting policy. 
You can either replace the entire + policy with a new one or replace only certain fields in the + current alerting policy by specifying the fields to be updated + via ``updateMask``. Returns the updated alerting policy. + + Args: + request (:class:`google.cloud.monitoring_v3.types.UpdateAlertPolicyRequest`): + The request object. The protocol for the + `UpdateAlertPolicy` request. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. A list of alerting policy field names. If this + field is not empty, each listed field in the existing + alerting policy is set to the value of the corresponding + field in the supplied policy (``alert_policy``), or to + the field's default value if the field is not in the + supplied alerting policy. Fields not listed retain their + previous value. + + Examples of valid field masks include ``display_name``, + ``documentation``, ``documentation.content``, + ``documentation.mime_type``, ``user_labels``, + ``user_label.nameofkey``, ``enabled``, ``conditions``, + ``combiner``, etc. + + If this field is empty, then the supplied alerting + policy replaces the existing policy. It is the same as + deleting the existing policy and adding the supplied + policy, except for the following: + + - The new policy will have the same + ``[ALERT_POLICY_ID]`` as the former policy. This + gives you continuity with the former policy in your + notifications and incidents. + - Conditions in the new policy will keep their former + ``[CONDITION_ID]`` if the supplied condition includes + the ``name`` field with that ``[CONDITION_ID]``. If + the supplied condition omits the ``name`` field, then + a new ``[CONDITION_ID]`` is created. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + alert_policy (:class:`google.cloud.monitoring_v3.types.AlertPolicy`): + Required. The updated alerting policy or the updated + values for the fields listed in ``update_mask``. If + ``update_mask`` is not empty, any fields in this policy + that are not in ``update_mask`` are ignored. + + This corresponds to the ``alert_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.AlertPolicy: + A description of the conditions under which some aspect of your system is + considered to be "unhealthy" and the ways to notify + people or services about this state. For an overview + of alert policies, see [Introduction to + Alerting](\ https://cloud.google.com/monitoring/alerts/). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([update_mask, alert_policy]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = alert_service.UpdateAlertPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
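# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the generated patch: a partial update that
# uses the ``update_mask`` semantics documented above so only ``display_name``
# is replaced.  The policy name and new display name are hypothetical
# placeholders.
# ---------------------------------------------------------------------------
from google.cloud import monitoring_v3
from google.protobuf import field_mask_pb2

client = monitoring_v3.AlertPolicyServiceClient()
patch = monitoring_v3.AlertPolicy(
    name="projects/my-project/alertPolicies/1234",
    display_name="Renamed policy (example)",
)
updated = client.update_alert_policy(
    update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    alert_policy=patch,
)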
+ if update_mask is not None: + request.update_mask = update_mask + if alert_policy is not None: + request.alert_policy = alert_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_alert_policy, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("alert_policy.name", request.alert_policy.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "AlertPolicyServiceAsyncClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/client.py new file mode 100644 index 00000000..b9aee512 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/client.py @@ -0,0 +1,855 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.services.alert_policy_service import pagers +from google.cloud.monitoring_v3.types import alert +from google.cloud.monitoring_v3.types import alert_service +from google.cloud.monitoring_v3.types import mutation_record +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import AlertPolicyServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import AlertPolicyServiceGrpcTransport +from .transports.grpc_asyncio import AlertPolicyServiceGrpcAsyncIOTransport + + +class AlertPolicyServiceClientMeta(type): + """Metaclass for the AlertPolicyService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[AlertPolicyServiceTransport]] + _transport_registry["grpc"] = AlertPolicyServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AlertPolicyServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[AlertPolicyServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AlertPolicyServiceClient(metaclass=AlertPolicyServiceClientMeta): + """The AlertPolicyService API is used to manage (list, create, delete, + edit) alert policies in Stackdriver Monitoring. An alerting policy + is a description of the conditions under which some aspect of your + system is considered to be "unhealthy" and the ways to notify people + or services about this state. In addition to using this API, alert + policies can also be managed through `Stackdriver + Monitoring `__, which can + be reached by clicking the "Monitoring" tab in `Cloud + Console `__. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
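# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the generated patch: the endpoint rewriting
# described above, shown on sample values.  The helper is private, so this is
# only meant to make the documented conversion concrete.
# ---------------------------------------------------------------------------
from google.cloud.monitoring_v3 import AlertPolicyServiceClient

assert (
    AlertPolicyServiceClient._get_default_mtls_endpoint("monitoring.googleapis.com")
    == "monitoring.mtls.googleapis.com"
)
assert (
    AlertPolicyServiceClient._get_default_mtls_endpoint(
        "monitoring.sandbox.googleapis.com"
    )
    == "monitoring.mtls.sandbox.googleapis.com"
)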
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "monitoring.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlertPolicyServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlertPolicyServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AlertPolicyServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AlertPolicyServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def alert_policy_path(project: str,alert_policy: str,) -> str: + """Returns a fully-qualified alert_policy string.""" + return "projects/{project}/alertPolicies/{alert_policy}".format(project=project, alert_policy=alert_policy, ) + + @staticmethod + def parse_alert_policy_path(path: str) -> Dict[str,str]: + """Parses a alert_policy path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/alertPolicies/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def alert_policy_condition_path(project: str,alert_policy: str,condition: str,) -> str: + """Returns a fully-qualified alert_policy_condition string.""" + return "projects/{project}/alertPolicies/{alert_policy}/conditions/{condition}".format(project=project, alert_policy=alert_policy, condition=condition, ) + + @staticmethod + def parse_alert_policy_condition_path(path: str) -> Dict[str,str]: + """Parses a alert_policy_condition path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/alertPolicies/(?P.+?)/conditions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AlertPolicyServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo 
= DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the alert policy service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, AlertPolicyServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
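# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the generated patch: pinning the endpoint
# through ``client_options`` instead of relying on the
# GOOGLE_API_USE_MTLS_ENDPOINT / GOOGLE_API_USE_CLIENT_CERTIFICATE environment
# variables handled above.
# ---------------------------------------------------------------------------
from google.api_core.client_options import ClientOptions
from google.cloud import monitoring_v3

options = ClientOptions(api_endpoint="monitoring.googleapis.com")
client = monitoring_v3.AlertPolicyServiceClient(client_options=options)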
+ if isinstance(transport, AlertPolicyServiceTransport): + # transport is a AlertPolicyServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_alert_policies(self, + request: alert_service.ListAlertPoliciesRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAlertPoliciesPager: + r"""Lists the existing alerting policies for the + workspace. + + Args: + request (google.cloud.monitoring_v3.types.ListAlertPoliciesRequest): + The request object. The protocol for the + `ListAlertPolicies` request. + name (str): + Required. The + `project `__ + whose alert policies are to be listed. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + Note that this field names the parent container in which + the alerting policies to be listed are stored. To + retrieve a single alerting policy by name, use the + [GetAlertPolicy][google.monitoring.v3.AlertPolicyService.GetAlertPolicy] + operation, instead. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.alert_policy_service.pagers.ListAlertPoliciesPager: + The protocol for the ListAlertPolicies response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a alert_service.ListAlertPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, alert_service.ListAlertPoliciesRequest): + request = alert_service.ListAlertPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
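# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the generated patch: calling the
# synchronous list_alert_policies documented above with a fully populated
# request object instead of the flattened ``name`` argument (the two styles
# are mutually exclusive, as the check in the method body enforces).  The
# project ID and filter string are hypothetical placeholders.
# ---------------------------------------------------------------------------
from google.cloud import monitoring_v3

client = monitoring_v3.AlertPolicyServiceClient()
request = monitoring_v3.ListAlertPoliciesRequest(
    name="projects/my-project",
    filter='display_name = "High CPU (example)"',
    page_size=50,
)
for policy in client.list_alert_policies(request=request):
    # The pager fetches additional pages transparently.
    print(policy.name)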
+ rpc = self._transport._wrapped_methods[self._transport.list_alert_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAlertPoliciesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_alert_policy(self, + request: alert_service.GetAlertPolicyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> alert.AlertPolicy: + r"""Gets a single alerting policy. + + Args: + request (google.cloud.monitoring_v3.types.GetAlertPolicyRequest): + The request object. The protocol for the + `GetAlertPolicy` request. + name (str): + Required. The alerting policy to retrieve. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.AlertPolicy: + A description of the conditions under which some aspect of your system is + considered to be "unhealthy" and the ways to notify + people or services about this state. For an overview + of alert policies, see [Introduction to + Alerting](\ https://cloud.google.com/monitoring/alerts/). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a alert_service.GetAlertPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, alert_service.GetAlertPolicyRequest): + request = alert_service.GetAlertPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_alert_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_alert_policy(self, + request: alert_service.CreateAlertPolicyRequest = None, + *, + name: str = None, + alert_policy: alert.AlertPolicy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> alert.AlertPolicy: + r"""Creates a new alerting policy. + + Args: + request (google.cloud.monitoring_v3.types.CreateAlertPolicyRequest): + The request object. The protocol for the + `CreateAlertPolicy` request. + name (str): + Required. The + `project `__ + in which to create the alerting policy. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + Note that this field names the parent container in which + the alerting policy will be written, not the name of the + created policy. \|name\| must be a host project of a + workspace, otherwise INVALID_ARGUMENT error will return. + The alerting policy that is returned will have a name + that contains a normalized representation of this name + as a prefix but adds a suffix of the form + ``/alertPolicies/[ALERT_POLICY_ID]``, identifying the + policy in the container. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + alert_policy (google.cloud.monitoring_v3.types.AlertPolicy): + Required. The requested alerting policy. You should omit + the ``name`` field in this policy. The name will be + returned in the new policy, including a new + ``[ALERT_POLICY_ID]`` value. + + This corresponds to the ``alert_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.AlertPolicy: + A description of the conditions under which some aspect of your system is + considered to be "unhealthy" and the ways to notify + people or services about this state. For an overview + of alert policies, see [Introduction to + Alerting](\ https://cloud.google.com/monitoring/alerts/). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, alert_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a alert_service.CreateAlertPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, alert_service.CreateAlertPolicyRequest): + request = alert_service.CreateAlertPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if alert_policy is not None: + request.alert_policy = alert_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_alert_policy] + + # Certain fields should be provided within the metadata header; + # add these here. 
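A hedged sketch of creating a policy through the flattened arguments documented above and then deleting it again. The policy below is deliberately minimal; a real call would also need at least one condition before the service accepts it, and the project ID is a placeholder:

from google.cloud import monitoring_v3

client = monitoring_v3.AlertPolicyServiceClient()

policy = monitoring_v3.AlertPolicy(
    display_name="Example policy",    # the name field is omitted; the service assigns it
    combiner=monitoring_v3.AlertPolicy.ConditionCombinerType.AND,
)
created = client.create_alert_policy(
    name="projects/example-project",  # the parent container, not the policy name
    alert_policy=policy,
)
print(created.name)                   # projects/.../alertPolicies/[ALERT_POLICY_ID]

# Clean up by deleting the policy that was just created.
client.delete_alert_policy(name=created.name)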
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_alert_policy(self, + request: alert_service.DeleteAlertPolicyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an alerting policy. + + Args: + request (google.cloud.monitoring_v3.types.DeleteAlertPolicyRequest): + The request object. The protocol for the + `DeleteAlertPolicy` request. + name (str): + Required. The alerting policy to delete. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] + + For more information, see + [AlertPolicy][google.monitoring.v3.AlertPolicy]. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a alert_service.DeleteAlertPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, alert_service.DeleteAlertPolicyRequest): + request = alert_service.DeleteAlertPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_alert_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_alert_policy(self, + request: alert_service.UpdateAlertPolicyRequest = None, + *, + update_mask: field_mask_pb2.FieldMask = None, + alert_policy: alert.AlertPolicy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> alert.AlertPolicy: + r"""Updates an alerting policy. You can either replace the entire + policy with a new one or replace only certain fields in the + current alerting policy by specifying the fields to be updated + via ``updateMask``. Returns the updated alerting policy. + + Args: + request (google.cloud.monitoring_v3.types.UpdateAlertPolicyRequest): + The request object. The protocol for the + `UpdateAlertPolicy` request. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. A list of alerting policy field names. 
If this + field is not empty, each listed field in the existing + alerting policy is set to the value of the corresponding + field in the supplied policy (``alert_policy``), or to + the field's default value if the field is not in the + supplied alerting policy. Fields not listed retain their + previous value. + + Examples of valid field masks include ``display_name``, + ``documentation``, ``documentation.content``, + ``documentation.mime_type``, ``user_labels``, + ``user_label.nameofkey``, ``enabled``, ``conditions``, + ``combiner``, etc. + + If this field is empty, then the supplied alerting + policy replaces the existing policy. It is the same as + deleting the existing policy and adding the supplied + policy, except for the following: + + - The new policy will have the same + ``[ALERT_POLICY_ID]`` as the former policy. This + gives you continuity with the former policy in your + notifications and incidents. + - Conditions in the new policy will keep their former + ``[CONDITION_ID]`` if the supplied condition includes + the ``name`` field with that ``[CONDITION_ID]``. If + the supplied condition omits the ``name`` field, then + a new ``[CONDITION_ID]`` is created. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + alert_policy (google.cloud.monitoring_v3.types.AlertPolicy): + Required. The updated alerting policy or the updated + values for the fields listed in ``update_mask``. If + ``update_mask`` is not empty, any fields in this policy + that are not in ``update_mask`` are ignored. + + This corresponds to the ``alert_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.AlertPolicy: + A description of the conditions under which some aspect of your system is + considered to be "unhealthy" and the ways to notify + people or services about this state. For an overview + of alert policies, see [Introduction to + Alerting](\ https://cloud.google.com/monitoring/alerts/). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([update_mask, alert_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a alert_service.UpdateAlertPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, alert_service.UpdateAlertPolicyRequest): + request = alert_service.UpdateAlertPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if update_mask is not None: + request.update_mask = update_mask + if alert_policy is not None: + request.alert_policy = alert_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
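A short sketch of a partial update following the ``update_mask`` semantics described above: only the listed fields are replaced, and everything else on the existing policy keeps its previous value. The policy name is a placeholder:

from google.cloud import monitoring_v3
from google.protobuf import field_mask_pb2

client = monitoring_v3.AlertPolicyServiceClient()

policy = monitoring_v3.AlertPolicy(
    name="projects/example-project/alertPolicies/1234567890",
    display_name="Renamed policy",
)
updated = client.update_alert_policy(
    update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    alert_policy=policy,
)
print(updated.display_name)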
+ rpc = self._transport._wrapped_methods[self._transport.update_alert_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("alert_policy.name", request.alert_policy.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "AlertPolicyServiceClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/pagers.py new file mode 100644 index 00000000..adcc60ac --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/pagers.py @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.monitoring_v3.types import alert +from google.cloud.monitoring_v3.types import alert_service + + +class ListAlertPoliciesPager: + """A pager for iterating through ``list_alert_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListAlertPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``alert_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAlertPolicies`` requests and continue to iterate + through the ``alert_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListAlertPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., alert_service.ListAlertPoliciesResponse], + request: alert_service.ListAlertPoliciesRequest, + response: alert_service.ListAlertPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListAlertPoliciesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListAlertPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = alert_service.ListAlertPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[alert_service.ListAlertPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[alert.AlertPolicy]: + for page in self.pages: + yield from page.alert_policies + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAlertPoliciesAsyncPager: + """A pager for iterating through ``list_alert_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListAlertPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``alert_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAlertPolicies`` requests and continue to iterate + through the ``alert_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListAlertPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[alert_service.ListAlertPoliciesResponse]], + request: alert_service.ListAlertPoliciesRequest, + response: alert_service.ListAlertPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListAlertPoliciesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListAlertPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
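In practice the pager returned by ``list_alert_policies`` is consumed either item by item or page by page; a brief sketch, noting that iterating exhausts the pager, so only one of the two loops would normally be used per call:

from google.cloud import monitoring_v3

client = monitoring_v3.AlertPolicyServiceClient()
pager = client.list_alert_policies(name="projects/example-project")

# Option 1: iterate AlertPolicy items; additional pages are fetched transparently.
for policy in pager:
    print(policy.name)

# Option 2: iterate the raw ListAlertPoliciesResponse pages instead.
# for page in client.list_alert_policies(name="projects/example-project").pages:
#     print(len(page.alert_policies), "policies on this page")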
+ """ + self._method = method + self._request = alert_service.ListAlertPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[alert_service.ListAlertPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[alert.AlertPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.alert_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/__init__.py new file mode 100644 index 00000000..6babe610 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AlertPolicyServiceTransport +from .grpc import AlertPolicyServiceGrpcTransport +from .grpc_asyncio import AlertPolicyServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AlertPolicyServiceTransport]] +_transport_registry['grpc'] = AlertPolicyServiceGrpcTransport +_transport_registry['grpc_asyncio'] = AlertPolicyServiceGrpcAsyncIOTransport + +__all__ = ( + 'AlertPolicyServiceTransport', + 'AlertPolicyServiceGrpcTransport', + 'AlertPolicyServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py new file mode 100644 index 00000000..db4d23af --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py @@ -0,0 +1,246 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
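The registry above is what backs the client's ``get_transport_class`` helper; a quick sketch of how the string keys map to the transport classes:

from google.cloud.monitoring_v3 import AlertPolicyServiceClient
from google.cloud.monitoring_v3.services.alert_policy_service.transports import (
    AlertPolicyServiceGrpcTransport,
    AlertPolicyServiceGrpcAsyncIOTransport,
)

assert AlertPolicyServiceClient.get_transport_class("grpc") is AlertPolicyServiceGrpcTransport
assert AlertPolicyServiceClient.get_transport_class("grpc_asyncio") is AlertPolicyServiceGrpcAsyncIOTransport

# With no argument, the first registered transport ("grpc") is returned.
assert AlertPolicyServiceClient.get_transport_class() is AlertPolicyServiceGrpcTransport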
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.types import alert +from google.cloud.monitoring_v3.types import alert_service +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-monitoring', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class AlertPolicyServiceTransport(abc.ABC): + """Abstract transport class for AlertPolicyService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ) + + DEFAULT_HOST: str = 'monitoring.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_alert_policies: gapic_v1.method.wrap_method( + self.list_alert_policies, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.get_alert_policy: gapic_v1.method.wrap_method( + self.get_alert_policy, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.create_alert_policy: gapic_v1.method.wrap_method( + self.create_alert_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.delete_alert_policy: gapic_v1.method.wrap_method( + self.delete_alert_policy, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.update_alert_policy: gapic_v1.method.wrap_method( + self.update_alert_policy, + default_timeout=30.0, + client_info=client_info, + ), + } + + @property + def list_alert_policies(self) -> Callable[ + [alert_service.ListAlertPoliciesRequest], + Union[ + alert_service.ListAlertPoliciesResponse, + Awaitable[alert_service.ListAlertPoliciesResponse] + ]]: + raise NotImplementedError() + + @property + def get_alert_policy(self) -> Callable[ + [alert_service.GetAlertPolicyRequest], + Union[ + alert.AlertPolicy, + Awaitable[alert.AlertPolicy] + ]]: + raise NotImplementedError() + + @property + def create_alert_policy(self) -> Callable[ + [alert_service.CreateAlertPolicyRequest], + Union[ + 
alert.AlertPolicy, + Awaitable[alert.AlertPolicy] + ]]: + raise NotImplementedError() + + @property + def delete_alert_policy(self) -> Callable[ + [alert_service.DeleteAlertPolicyRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def update_alert_policy(self) -> Callable[ + [alert_service.UpdateAlertPolicyRequest], + Union[ + alert.AlertPolicy, + Awaitable[alert.AlertPolicy] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'AlertPolicyServiceTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py new file mode 100644 index 00000000..3d144ee4 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.monitoring_v3.types import alert +from google.cloud.monitoring_v3.types import alert_service +from google.protobuf import empty_pb2 # type: ignore +from .base import AlertPolicyServiceTransport, DEFAULT_CLIENT_INFO + + +class AlertPolicyServiceGrpcTransport(AlertPolicyServiceTransport): + """gRPC backend transport for AlertPolicyService. + + The AlertPolicyService API is used to manage (list, create, delete, + edit) alert policies in Stackdriver Monitoring. An alerting policy + is a description of the conditions under which some aspect of your + system is considered to be "unhealthy" and the ways to notify people + or services about this state. In addition to using this API, alert + policies can also be managed through `Stackdriver + Monitoring `__, which can + be reached by clicking the "Monitoring" tab in `Cloud + Console `__. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
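The base transport above wires ``list_alert_policies``, ``get_alert_policy``, and ``delete_alert_policy`` to retry on ``ServiceUnavailable`` with a 30-second deadline; callers can override those defaults per call. A hedged sketch with illustrative values:

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import monitoring_v3

client = monitoring_v3.AlertPolicyServiceClient()

# A more patient retry than the generated default shown in the base transport.
custom_retry = retries.Retry(
    initial=0.5,
    maximum=60.0,
    multiplier=2.0,
    deadline=120.0,
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
)

policies = client.list_alert_policies(
    name="projects/example-project",
    retry=custom_retry,
    timeout=120.0,
)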
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
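``create_channel`` above can also be called directly when the application wants to own the channel; as noted in ``__init__``, a caller-supplied channel makes the transport skip its own credential handling. A sketch under the assumption that application default credentials are available:

from google.cloud.monitoring_v3 import AlertPolicyServiceClient
from google.cloud.monitoring_v3.services.alert_policy_service.transports import (
    AlertPolicyServiceGrpcTransport,
)

# Let the helper resolve application default credentials and scopes itself.
channel = AlertPolicyServiceGrpcTransport.create_channel("monitoring.googleapis.com")
transport = AlertPolicyServiceGrpcTransport(channel=channel)
client = AlertPolicyServiceClient(transport=transport)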
+ """ + return self._grpc_channel + + @property + def list_alert_policies(self) -> Callable[ + [alert_service.ListAlertPoliciesRequest], + alert_service.ListAlertPoliciesResponse]: + r"""Return a callable for the list alert policies method over gRPC. + + Lists the existing alerting policies for the + workspace. + + Returns: + Callable[[~.ListAlertPoliciesRequest], + ~.ListAlertPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_alert_policies' not in self._stubs: + self._stubs['list_alert_policies'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/ListAlertPolicies', + request_serializer=alert_service.ListAlertPoliciesRequest.serialize, + response_deserializer=alert_service.ListAlertPoliciesResponse.deserialize, + ) + return self._stubs['list_alert_policies'] + + @property + def get_alert_policy(self) -> Callable[ + [alert_service.GetAlertPolicyRequest], + alert.AlertPolicy]: + r"""Return a callable for the get alert policy method over gRPC. + + Gets a single alerting policy. + + Returns: + Callable[[~.GetAlertPolicyRequest], + ~.AlertPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_alert_policy' not in self._stubs: + self._stubs['get_alert_policy'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/GetAlertPolicy', + request_serializer=alert_service.GetAlertPolicyRequest.serialize, + response_deserializer=alert.AlertPolicy.deserialize, + ) + return self._stubs['get_alert_policy'] + + @property + def create_alert_policy(self) -> Callable[ + [alert_service.CreateAlertPolicyRequest], + alert.AlertPolicy]: + r"""Return a callable for the create alert policy method over gRPC. + + Creates a new alerting policy. + + Returns: + Callable[[~.CreateAlertPolicyRequest], + ~.AlertPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_alert_policy' not in self._stubs: + self._stubs['create_alert_policy'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/CreateAlertPolicy', + request_serializer=alert_service.CreateAlertPolicyRequest.serialize, + response_deserializer=alert.AlertPolicy.deserialize, + ) + return self._stubs['create_alert_policy'] + + @property + def delete_alert_policy(self) -> Callable[ + [alert_service.DeleteAlertPolicyRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete alert policy method over gRPC. + + Deletes an alerting policy. + + Returns: + Callable[[~.DeleteAlertPolicyRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_alert_policy' not in self._stubs: + self._stubs['delete_alert_policy'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/DeleteAlertPolicy', + request_serializer=alert_service.DeleteAlertPolicyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_alert_policy'] + + @property + def update_alert_policy(self) -> Callable[ + [alert_service.UpdateAlertPolicyRequest], + alert.AlertPolicy]: + r"""Return a callable for the update alert policy method over gRPC. + + Updates an alerting policy. You can either replace the entire + policy with a new one or replace only certain fields in the + current alerting policy by specifying the fields to be updated + via ``updateMask``. Returns the updated alerting policy. + + Returns: + Callable[[~.UpdateAlertPolicyRequest], + ~.AlertPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_alert_policy' not in self._stubs: + self._stubs['update_alert_policy'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/UpdateAlertPolicy', + request_serializer=alert_service.UpdateAlertPolicyRequest.serialize, + response_deserializer=alert.AlertPolicy.deserialize, + ) + return self._stubs['update_alert_policy'] + + +__all__ = ( + 'AlertPolicyServiceGrpcTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..4d0c9160 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py @@ -0,0 +1,374 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.monitoring_v3.types import alert +from google.cloud.monitoring_v3.types import alert_service +from google.protobuf import empty_pb2 # type: ignore +from .base import AlertPolicyServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import AlertPolicyServiceGrpcTransport + + +class AlertPolicyServiceGrpcAsyncIOTransport(AlertPolicyServiceTransport): + """gRPC AsyncIO backend transport for AlertPolicyService. 
+ + The AlertPolicyService API is used to manage (list, create, delete, + edit) alert policies in Stackdriver Monitoring. An alerting policy + is a description of the conditions under which some aspect of your + system is considered to be "unhealthy" and the ways to notify people + or services about this state. In addition to using this API, alert + policies can also be managed through `Stackdriver + Monitoring `__, which can + be reached by clicking the "Monitoring" tab in `Cloud + Console `__. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_alert_policies(self) -> Callable[ + [alert_service.ListAlertPoliciesRequest], + Awaitable[alert_service.ListAlertPoliciesResponse]]: + r"""Return a callable for the list alert policies method over gRPC. + + Lists the existing alerting policies for the + workspace. + + Returns: + Callable[[~.ListAlertPoliciesRequest], + Awaitable[~.ListAlertPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_alert_policies' not in self._stubs: + self._stubs['list_alert_policies'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/ListAlertPolicies', + request_serializer=alert_service.ListAlertPoliciesRequest.serialize, + response_deserializer=alert_service.ListAlertPoliciesResponse.deserialize, + ) + return self._stubs['list_alert_policies'] + + @property + def get_alert_policy(self) -> Callable[ + [alert_service.GetAlertPolicyRequest], + Awaitable[alert.AlertPolicy]]: + r"""Return a callable for the get alert policy method over gRPC. + + Gets a single alerting policy. + + Returns: + Callable[[~.GetAlertPolicyRequest], + Awaitable[~.AlertPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
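The ``client_cert_source_for_mtls`` branch above expects a callable returning PEM-encoded certificate and key bytes; a hedged sketch, where the certificate and key file paths are hypothetical placeholders:

from google.cloud.monitoring_v3 import AlertPolicyServiceAsyncClient
from google.cloud.monitoring_v3.services.alert_policy_service.transports import (
    AlertPolicyServiceGrpcAsyncIOTransport,
)

def client_cert_source():
    # Hypothetical local files holding the client certificate and private key.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()

transport = AlertPolicyServiceGrpcAsyncIOTransport(
    host="monitoring.mtls.googleapis.com",
    client_cert_source_for_mtls=client_cert_source,
)
client = AlertPolicyServiceAsyncClient(transport=transport)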
+ if 'get_alert_policy' not in self._stubs: + self._stubs['get_alert_policy'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/GetAlertPolicy', + request_serializer=alert_service.GetAlertPolicyRequest.serialize, + response_deserializer=alert.AlertPolicy.deserialize, + ) + return self._stubs['get_alert_policy'] + + @property + def create_alert_policy(self) -> Callable[ + [alert_service.CreateAlertPolicyRequest], + Awaitable[alert.AlertPolicy]]: + r"""Return a callable for the create alert policy method over gRPC. + + Creates a new alerting policy. + + Returns: + Callable[[~.CreateAlertPolicyRequest], + Awaitable[~.AlertPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_alert_policy' not in self._stubs: + self._stubs['create_alert_policy'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/CreateAlertPolicy', + request_serializer=alert_service.CreateAlertPolicyRequest.serialize, + response_deserializer=alert.AlertPolicy.deserialize, + ) + return self._stubs['create_alert_policy'] + + @property + def delete_alert_policy(self) -> Callable[ + [alert_service.DeleteAlertPolicyRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete alert policy method over gRPC. + + Deletes an alerting policy. + + Returns: + Callable[[~.DeleteAlertPolicyRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_alert_policy' not in self._stubs: + self._stubs['delete_alert_policy'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/DeleteAlertPolicy', + request_serializer=alert_service.DeleteAlertPolicyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_alert_policy'] + + @property + def update_alert_policy(self) -> Callable[ + [alert_service.UpdateAlertPolicyRequest], + Awaitable[alert.AlertPolicy]]: + r"""Return a callable for the update alert policy method over gRPC. + + Updates an alerting policy. You can either replace the entire + policy with a new one or replace only certain fields in the + current alerting policy by specifying the fields to be updated + via ``updateMask``. Returns the updated alerting policy. + + Returns: + Callable[[~.UpdateAlertPolicyRequest], + Awaitable[~.AlertPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_alert_policy' not in self._stubs: + self._stubs['update_alert_policy'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.AlertPolicyService/UpdateAlertPolicy', + request_serializer=alert_service.UpdateAlertPolicyRequest.serialize, + response_deserializer=alert.AlertPolicy.deserialize, + ) + return self._stubs['update_alert_policy'] + + +__all__ = ( + 'AlertPolicyServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/__init__.py new file mode 100644 index 00000000..a59c4473 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import GroupServiceClient +from .async_client import GroupServiceAsyncClient + +__all__ = ( + 'GroupServiceClient', + 'GroupServiceAsyncClient', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/async_client.py new file mode 100644 index 00000000..09a3e390 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/async_client.py @@ -0,0 +1,793 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
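With the AsyncIO transport registered above, the asynchronous client is driven from an event loop; a minimal sketch, with the project ID as a placeholder:

import asyncio

from google.cloud import monitoring_v3


async def main():
    client = monitoring_v3.AlertPolicyServiceAsyncClient()
    # The call itself is awaited; the returned async pager is then iterated.
    pager = await client.list_alert_policies(name="projects/example-project")
    async for policy in pager:
        print(policy.name)


asyncio.run(main())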
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.services.group_service import pagers +from google.cloud.monitoring_v3.types import group +from google.cloud.monitoring_v3.types import group as gm_group +from google.cloud.monitoring_v3.types import group_service +from .transports.base import GroupServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import GroupServiceGrpcAsyncIOTransport +from .client import GroupServiceClient + + +class GroupServiceAsyncClient: + """The Group API lets you inspect and manage your + `groups <#google.monitoring.v3.Group>`__. + + A group is a named filter that is used to identify a collection of + monitored resources. Groups are typically used to mirror the + physical and/or logical topology of the environment. Because group + membership is computed dynamically, monitored resources that are + started in the future are automatically placed in matching groups. + By using a group to name monitored resources in, for example, an + alert policy, the target of that alert policy is updated + automatically as monitored resources are added and removed from the + infrastructure. + """ + + _client: GroupServiceClient + + DEFAULT_ENDPOINT = GroupServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GroupServiceClient.DEFAULT_MTLS_ENDPOINT + + group_path = staticmethod(GroupServiceClient.group_path) + parse_group_path = staticmethod(GroupServiceClient.parse_group_path) + common_billing_account_path = staticmethod(GroupServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(GroupServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(GroupServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(GroupServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(GroupServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(GroupServiceClient.parse_common_organization_path) + common_project_path = staticmethod(GroupServiceClient.common_project_path) + parse_common_project_path = staticmethod(GroupServiceClient.parse_common_project_path) + common_location_path = staticmethod(GroupServiceClient.common_location_path) + parse_common_location_path = staticmethod(GroupServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GroupServiceAsyncClient: The constructed client. 
+ """ + return GroupServiceClient.from_service_account_info.__func__(GroupServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GroupServiceAsyncClient: The constructed client. + """ + return GroupServiceClient.from_service_account_file.__func__(GroupServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GroupServiceTransport: + """Returns the transport used by the client instance. + + Returns: + GroupServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(GroupServiceClient).get_transport_class, type(GroupServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, GroupServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the group service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.GroupServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = GroupServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_groups(self, + request: group_service.ListGroupsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGroupsAsyncPager: + r"""Lists the existing groups. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListGroupsRequest`): + The request object. The `ListGroup` request. 
+ name (:class:`str`): + Required. The + `project `__ + whose groups are to be listed. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.group_service.pagers.ListGroupsAsyncPager: + The ListGroups response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = group_service.ListGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_groups, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGroupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_group(self, + request: group_service.GetGroupRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> group.Group: + r"""Gets a single group. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetGroupRequest`): + The request object. The `GetGroup` request. + name (:class:`str`): + Required. The group to retrieve. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Group: + The description of a dynamic collection of monitored resources. Each group + has a filter that is matched against monitored + resources and their associated metadata. 
If a group's + filter matches an available monitored resource, then + that resource is a member of that group. Groups can + contain any number of monitored resources, and each + monitored resource can be a member of any number of + groups. + + Groups can be nested in parent-child hierarchies. The + parentName field identifies an optional parent for + each group. If a group has a parent, then the only + monitored resources available to be matched by the + group's filter are the resources contained in the + parent group. In other words, a group contains the + monitored resources that match its filter and the + filters of all the group's ancestors. A group without + a parent can contain any monitored resource. + + For example, consider an infrastructure running a set + of instances with two user-defined tags: + "environment" and "role". A parent group has a + filter, environment="production". A child of that + parent group has a filter, role="transcoder". The + parent group contains all instances in the production + environment, regardless of their roles. The child + group contains instances that have the transcoder + role *and* are in the production environment. + + The monitored resources contained in a group can + change at any moment, depending on what resources + exist and what filters are associated with the group + and its ancestors. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = group_service.GetGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_group, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_group(self, + request: group_service.CreateGroupRequest = None, + *, + name: str = None, + group: gm_group.Group = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gm_group.Group: + r"""Creates a new group. + + Args: + request (:class:`google.cloud.monitoring_v3.types.CreateGroupRequest`): + The request object. The `CreateGroup` request. + name (:class:`str`): + Required. The + `project `__ + in which to create the group. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + group (:class:`google.cloud.monitoring_v3.types.Group`): + Required. A group definition. 
It is an error to define + the ``name`` field because the system assigns the name. + + This corresponds to the ``group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Group: + The description of a dynamic collection of monitored resources. Each group + has a filter that is matched against monitored + resources and their associated metadata. If a group's + filter matches an available monitored resource, then + that resource is a member of that group. Groups can + contain any number of monitored resources, and each + monitored resource can be a member of any number of + groups. + + Groups can be nested in parent-child hierarchies. The + parentName field identifies an optional parent for + each group. If a group has a parent, then the only + monitored resources available to be matched by the + group's filter are the resources contained in the + parent group. In other words, a group contains the + monitored resources that match its filter and the + filters of all the group's ancestors. A group without + a parent can contain any monitored resource. + + For example, consider an infrastructure running a set + of instances with two user-defined tags: + "environment" and "role". A parent group has a + filter, environment="production". A child of that + parent group has a filter, role="transcoder". The + parent group contains all instances in the production + environment, regardless of their roles. The child + group contains instances that have the transcoder + role *and* are in the production environment. + + The monitored resources contained in a group can + change at any moment, depending on what resources + exist and what filters are associated with the group + and its ancestors. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, group]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = group_service.CreateGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if group is not None: + request.group = group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_group, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_group(self, + request: group_service.UpdateGroupRequest = None, + *, + group: gm_group.Group = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gm_group.Group: + r"""Updates an existing group. You can change any group attributes + except ``name``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.UpdateGroupRequest`): + The request object. The `UpdateGroup` request. + group (:class:`google.cloud.monitoring_v3.types.Group`): + Required. The new definition of the group. All fields of + the existing group, excepting ``name``, are replaced + with the corresponding fields of this group. + + This corresponds to the ``group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Group: + The description of a dynamic collection of monitored resources. Each group + has a filter that is matched against monitored + resources and their associated metadata. If a group's + filter matches an available monitored resource, then + that resource is a member of that group. Groups can + contain any number of monitored resources, and each + monitored resource can be a member of any number of + groups. + + Groups can be nested in parent-child hierarchies. The + parentName field identifies an optional parent for + each group. If a group has a parent, then the only + monitored resources available to be matched by the + group's filter are the resources contained in the + parent group. In other words, a group contains the + monitored resources that match its filter and the + filters of all the group's ancestors. A group without + a parent can contain any monitored resource. + + For example, consider an infrastructure running a set + of instances with two user-defined tags: + "environment" and "role". A parent group has a + filter, environment="production". A child of that + parent group has a filter, role="transcoder". The + parent group contains all instances in the production + environment, regardless of their roles. The child + group contains instances that have the transcoder + role *and* are in the production environment. + + The monitored resources contained in a group can + change at any moment, depending on what resources + exist and what filters are associated with the group + and its ancestors. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([group]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = group_service.UpdateGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if group is not None: + request.group = group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
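# A hedged sketch of driving this async ``update_group`` wrapper from user
# code; the group name is a placeholder and application-default credentials
# are assumed.
import asyncio

from google.cloud import monitoring_v3

async def rename_group() -> monitoring_v3.Group:
    client = monitoring_v3.GroupServiceAsyncClient()
    group = await client.get_group(name="projects/my-project/groups/1234")
    group.display_name = "Renamed group"
    # Everything except the immutable ``name`` is replaced by the update.
    return await client.update_group(group=group)

asyncio.run(rename_group())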
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_group, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=180.0, + ), + default_timeout=180.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("group.name", request.group.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_group(self, + request: group_service.DeleteGroupRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing group. + + Args: + request (:class:`google.cloud.monitoring_v3.types.DeleteGroupRequest`): + The request object. The `DeleteGroup` request. The + default behavior is to be able to delete a single group + without any descendants. + name (:class:`str`): + Required. The group to delete. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = group_service.DeleteGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_group, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_group_members(self, + request: group_service.ListGroupMembersRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGroupMembersAsyncPager: + r"""Lists the monitored resources that are members of a + group. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListGroupMembersRequest`): + The request object. The `ListGroupMembers` request. + name (:class:`str`): + Required. 
The group whose members are listed. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.group_service.pagers.ListGroupMembersAsyncPager: + The ListGroupMembers response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = group_service.ListGroupMembersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_group_members, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGroupMembersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "GroupServiceAsyncClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/client.py new file mode 100644 index 00000000..07e6f6f0 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/client.py @@ -0,0 +1,954 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.services.group_service import pagers +from google.cloud.monitoring_v3.types import group +from google.cloud.monitoring_v3.types import group as gm_group +from google.cloud.monitoring_v3.types import group_service +from .transports.base import GroupServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import GroupServiceGrpcTransport +from .transports.grpc_asyncio import GroupServiceGrpcAsyncIOTransport + + +class GroupServiceClientMeta(type): + """Metaclass for the GroupService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[GroupServiceTransport]] + _transport_registry["grpc"] = GroupServiceGrpcTransport + _transport_registry["grpc_asyncio"] = GroupServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[GroupServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GroupServiceClient(metaclass=GroupServiceClientMeta): + """The Group API lets you inspect and manage your + `groups <#google.monitoring.v3.Group>`__. + + A group is a named filter that is used to identify a collection of + monitored resources. Groups are typically used to mirror the + physical and/or logical topology of the environment. Because group + membership is computed dynamically, monitored resources that are + started in the future are automatically placed in matching groups. + By using a group to name monitored resources in, for example, an + alert policy, the target of that alert policy is updated + automatically as monitored resources are added and removed from the + infrastructure. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "monitoring.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GroupServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GroupServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GroupServiceTransport: + """Returns the transport used by the client instance. + + Returns: + GroupServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def group_path(project: str,group: str,) -> str: + """Returns a fully-qualified group string.""" + return "projects/{project}/groups/{group}".format(project=project, group=group, ) + + @staticmethod + def parse_group_path(path: str) -> Dict[str,str]: + """Parses a group path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/groups/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, GroupServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the group service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, GroupServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. 
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GroupServiceTransport): + # transport is a GroupServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_groups(self, + request: group_service.ListGroupsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGroupsPager: + r"""Lists the existing groups. + + Args: + request (google.cloud.monitoring_v3.types.ListGroupsRequest): + The request object. The `ListGroup` request. + name (str): + Required. The + `project `__ + whose groups are to be listed. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.group_service.pagers.ListGroupsPager: + The ListGroups response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a group_service.ListGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, group_service.ListGroupsRequest): + request = group_service.ListGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGroupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_group(self, + request: group_service.GetGroupRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> group.Group: + r"""Gets a single group. + + Args: + request (google.cloud.monitoring_v3.types.GetGroupRequest): + The request object. The `GetGroup` request. + name (str): + Required. The group to retrieve. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Group: + The description of a dynamic collection of monitored resources. Each group + has a filter that is matched against monitored + resources and their associated metadata. If a group's + filter matches an available monitored resource, then + that resource is a member of that group. Groups can + contain any number of monitored resources, and each + monitored resource can be a member of any number of + groups. + + Groups can be nested in parent-child hierarchies. The + parentName field identifies an optional parent for + each group. If a group has a parent, then the only + monitored resources available to be matched by the + group's filter are the resources contained in the + parent group. In other words, a group contains the + monitored resources that match its filter and the + filters of all the group's ancestors. A group without + a parent can contain any monitored resource. + + For example, consider an infrastructure running a set + of instances with two user-defined tags: + "environment" and "role". A parent group has a + filter, environment="production". A child of that + parent group has a filter, role="transcoder". The + parent group contains all instances in the production + environment, regardless of their roles. The child + group contains instances that have the transcoder + role *and* are in the production environment. + + The monitored resources contained in a group can + change at any moment, depending on what resources + exist and what filters are associated with the group + and its ancestors. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a group_service.GetGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, group_service.GetGroupRequest): + request = group_service.GetGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
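# A hedged sketch of the endpoint selection implemented in ``__init__`` above:
# an explicit ``api_endpoint`` in ``client_options`` always takes precedence
# over the GOOGLE_API_USE_MTLS_ENDPOINT environment variable. The endpoint
# value is only illustrative.
from google.api_core.client_options import ClientOptions
from google.cloud import monitoring_v3

options = ClientOptions(api_endpoint="monitoring.googleapis.com")
client = monitoring_v3.GroupServiceClient(client_options=options)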
+ rpc = self._transport._wrapped_methods[self._transport.get_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_group(self, + request: group_service.CreateGroupRequest = None, + *, + name: str = None, + group: gm_group.Group = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gm_group.Group: + r"""Creates a new group. + + Args: + request (google.cloud.monitoring_v3.types.CreateGroupRequest): + The request object. The `CreateGroup` request. + name (str): + Required. The + `project `__ + in which to create the group. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + group (google.cloud.monitoring_v3.types.Group): + Required. A group definition. It is an error to define + the ``name`` field because the system assigns the name. + + This corresponds to the ``group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Group: + The description of a dynamic collection of monitored resources. Each group + has a filter that is matched against monitored + resources and their associated metadata. If a group's + filter matches an available monitored resource, then + that resource is a member of that group. Groups can + contain any number of monitored resources, and each + monitored resource can be a member of any number of + groups. + + Groups can be nested in parent-child hierarchies. The + parentName field identifies an optional parent for + each group. If a group has a parent, then the only + monitored resources available to be matched by the + group's filter are the resources contained in the + parent group. In other words, a group contains the + monitored resources that match its filter and the + filters of all the group's ancestors. A group without + a parent can contain any monitored resource. + + For example, consider an infrastructure running a set + of instances with two user-defined tags: + "environment" and "role". A parent group has a + filter, environment="production". A child of that + parent group has a filter, role="transcoder". The + parent group contains all instances in the production + environment, regardless of their roles. The child + group contains instances that have the transcoder + role *and* are in the production environment. + + The monitored resources contained in a group can + change at any moment, depending on what resources + exist and what filters are associated with the group + and its ancestors. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
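# A hedged sketch of ``create_group`` with the flattened arguments documented
# above; the project ID and filter expression are placeholders.
from google.cloud import monitoring_v3

client = monitoring_v3.GroupServiceClient()
group = monitoring_v3.Group(
    display_name="GCE instances",
    filter='resource.type = "gce_instance"',
)
# The service assigns ``name``, so it is left unset on the new group.
created = client.create_group(name="projects/my-project", group=group)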
+ has_flattened_params = any([name, group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a group_service.CreateGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, group_service.CreateGroupRequest): + request = group_service.CreateGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if group is not None: + request.group = group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_group(self, + request: group_service.UpdateGroupRequest = None, + *, + group: gm_group.Group = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gm_group.Group: + r"""Updates an existing group. You can change any group attributes + except ``name``. + + Args: + request (google.cloud.monitoring_v3.types.UpdateGroupRequest): + The request object. The `UpdateGroup` request. + group (google.cloud.monitoring_v3.types.Group): + Required. The new definition of the group. All fields of + the existing group, excepting ``name``, are replaced + with the corresponding fields of this group. + + This corresponds to the ``group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Group: + The description of a dynamic collection of monitored resources. Each group + has a filter that is matched against monitored + resources and their associated metadata. If a group's + filter matches an available monitored resource, then + that resource is a member of that group. Groups can + contain any number of monitored resources, and each + monitored resource can be a member of any number of + groups. + + Groups can be nested in parent-child hierarchies. The + parentName field identifies an optional parent for + each group. If a group has a parent, then the only + monitored resources available to be matched by the + group's filter are the resources contained in the + parent group. In other words, a group contains the + monitored resources that match its filter and the + filters of all the group's ancestors. A group without + a parent can contain any monitored resource. + + For example, consider an infrastructure running a set + of instances with two user-defined tags: + "environment" and "role". A parent group has a + filter, environment="production". A child of that + parent group has a filter, role="transcoder". 
The + parent group contains all instances in the production + environment, regardless of their roles. The child + group contains instances that have the transcoder + role *and* are in the production environment. + + The monitored resources contained in a group can + change at any moment, depending on what resources + exist and what filters are associated with the group + and its ancestors. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a group_service.UpdateGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, group_service.UpdateGroupRequest): + request = group_service.UpdateGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if group is not None: + request.group = group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("group.name", request.group.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_group(self, + request: group_service.DeleteGroupRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing group. + + Args: + request (google.cloud.monitoring_v3.types.DeleteGroupRequest): + The request object. The `DeleteGroup` request. The + default behavior is to be able to delete a single group + without any descendants. + name (str): + Required. The group to delete. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a group_service.DeleteGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
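# A hedged sketch of ``delete_group`` using the ``group_path`` helper defined
# on this client; the project and group IDs are placeholders.
from google.cloud import monitoring_v3

client = monitoring_v3.GroupServiceClient()
name = client.group_path("my-project", "1234")
client.delete_group(name=name)  # returns None on success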
+ if not isinstance(request, group_service.DeleteGroupRequest): + request = group_service.DeleteGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_group_members(self, + request: group_service.ListGroupMembersRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGroupMembersPager: + r"""Lists the monitored resources that are members of a + group. + + Args: + request (google.cloud.monitoring_v3.types.ListGroupMembersRequest): + The request object. The `ListGroupMembers` request. + name (str): + Required. The group whose members are listed. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.group_service.pagers.ListGroupMembersPager: + The ListGroupMembers response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a group_service.ListGroupMembersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, group_service.ListGroupMembersRequest): + request = group_service.ListGroupMembersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_group_members] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
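# A hedged sketch of consuming the pager constructed below: iterating it
# yields MonitoredResource members and transparently fetches further pages.
# The group name is a placeholder.
from google.cloud import monitoring_v3

client = monitoring_v3.GroupServiceClient()
members = client.list_group_members(name="projects/my-project/groups/1234")
for resource in members:
    print(resource.type, dict(resource.labels))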
+ response = pagers.ListGroupMembersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "GroupServiceClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/pagers.py new file mode 100644 index 00000000..f6a50de1 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/pagers.py @@ -0,0 +1,264 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.types import group +from google.cloud.monitoring_v3.types import group_service + + +class ListGroupsPager: + """A pager for iterating through ``list_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``group`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGroups`` requests and continue to iterate + through the ``group`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., group_service.ListGroupsResponse], + request: group_service.ListGroupsRequest, + response: group_service.ListGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListGroupsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = group_service.ListGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[group_service.ListGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[group.Group]: + for page in self.pages: + yield from page.group + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGroupsAsyncPager: + """A pager for iterating through ``list_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``group`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGroups`` requests and continue to iterate + through the ``group`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[group_service.ListGroupsResponse]], + request: group_service.ListGroupsRequest, + response: group_service.ListGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListGroupsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = group_service.ListGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[group_service.ListGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[group.Group]: + async def async_generator(): + async for page in self.pages: + for response in page.group: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGroupMembersPager: + """A pager for iterating through ``list_group_members`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListGroupMembersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``members`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGroupMembers`` requests and continue to iterate + through the ``members`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.monitoring_v3.types.ListGroupMembersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., group_service.ListGroupMembersResponse], + request: group_service.ListGroupMembersRequest, + response: group_service.ListGroupMembersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListGroupMembersRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListGroupMembersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = group_service.ListGroupMembersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[group_service.ListGroupMembersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResource]: + for page in self.pages: + yield from page.members + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGroupMembersAsyncPager: + """A pager for iterating through ``list_group_members`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListGroupMembersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``members`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGroupMembers`` requests and continue to iterate + through the ``members`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListGroupMembersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[group_service.ListGroupMembersResponse]], + request: group_service.ListGroupMembersRequest, + response: group_service.ListGroupMembersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListGroupMembersRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListGroupMembersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = group_service.ListGroupMembersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[group_service.ListGroupMembersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[monitored_resource_pb2.MonitoredResource]: + async def async_generator(): + async for page in self.pages: + for response in page.members: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/__init__.py new file mode 100644 index 00000000..e68392d4 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GroupServiceTransport +from .grpc import GroupServiceGrpcTransport +from .grpc_asyncio import GroupServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GroupServiceTransport]] +_transport_registry['grpc'] = GroupServiceGrpcTransport +_transport_registry['grpc_asyncio'] = GroupServiceGrpcAsyncIOTransport + +__all__ = ( + 'GroupServiceTransport', + 'GroupServiceGrpcTransport', + 'GroupServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/base.py new file mode 100644 index 00000000..8717ba53 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/base.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.types import group +from google.cloud.monitoring_v3.types import group as gm_group +from google.cloud.monitoring_v3.types import group_service +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-monitoring', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class GroupServiceTransport(abc.ABC): + """Abstract transport class for GroupService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ) + + DEFAULT_HOST: str = 'monitoring.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_groups: gapic_v1.method.wrap_method( + self.list_groups, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.get_group: gapic_v1.method.wrap_method( + self.get_group, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.create_group: gapic_v1.method.wrap_method( + self.create_group, + default_timeout=30.0, + client_info=client_info, + ), + self.update_group: gapic_v1.method.wrap_method( + self.update_group, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=180.0, + ), + default_timeout=180.0, + client_info=client_info, + ), + self.delete_group: gapic_v1.method.wrap_method( + self.delete_group, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_group_members: gapic_v1.method.wrap_method( + self.list_group_members, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + } + + @property + def list_groups(self) -> Callable[ + [group_service.ListGroupsRequest], + Union[ + group_service.ListGroupsResponse, + 
Awaitable[group_service.ListGroupsResponse] + ]]: + raise NotImplementedError() + + @property + def get_group(self) -> Callable[ + [group_service.GetGroupRequest], + Union[ + group.Group, + Awaitable[group.Group] + ]]: + raise NotImplementedError() + + @property + def create_group(self) -> Callable[ + [group_service.CreateGroupRequest], + Union[ + gm_group.Group, + Awaitable[gm_group.Group] + ]]: + raise NotImplementedError() + + @property + def update_group(self) -> Callable[ + [group_service.UpdateGroupRequest], + Union[ + gm_group.Group, + Awaitable[gm_group.Group] + ]]: + raise NotImplementedError() + + @property + def delete_group(self) -> Callable[ + [group_service.DeleteGroupRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_group_members(self) -> Callable[ + [group_service.ListGroupMembersRequest], + Union[ + group_service.ListGroupMembersResponse, + Awaitable[group_service.ListGroupMembersResponse] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'GroupServiceTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc.py new file mode 100644 index 00000000..9dadfef7 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc.py @@ -0,0 +1,398 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.monitoring_v3.types import group +from google.cloud.monitoring_v3.types import group as gm_group +from google.cloud.monitoring_v3.types import group_service +from google.protobuf import empty_pb2 # type: ignore +from .base import GroupServiceTransport, DEFAULT_CLIENT_INFO + + +class GroupServiceGrpcTransport(GroupServiceTransport): + """gRPC backend transport for GroupService. + + The Group API lets you inspect and manage your + `groups <#google.monitoring.v3.Group>`__. + + A group is a named filter that is used to identify a collection of + monitored resources. Groups are typically used to mirror the + physical and/or logical topology of the environment. Because group + membership is computed dynamically, monitored resources that are + started in the future are automatically placed in matching groups. + By using a group to name monitored resources in, for example, an + alert policy, the target of that alert policy is updated + automatically as monitored resources are added and removed from the + infrastructure. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_groups(self) -> Callable[ + [group_service.ListGroupsRequest], + group_service.ListGroupsResponse]: + r"""Return a callable for the list groups method over gRPC. + + Lists the existing groups. + + Returns: + Callable[[~.ListGroupsRequest], + ~.ListGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_groups' not in self._stubs: + self._stubs['list_groups'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/ListGroups', + request_serializer=group_service.ListGroupsRequest.serialize, + response_deserializer=group_service.ListGroupsResponse.deserialize, + ) + return self._stubs['list_groups'] + + @property + def get_group(self) -> Callable[ + [group_service.GetGroupRequest], + group.Group]: + r"""Return a callable for the get group method over gRPC. + + Gets a single group. + + Returns: + Callable[[~.GetGroupRequest], + ~.Group]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_group' not in self._stubs: + self._stubs['get_group'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/GetGroup', + request_serializer=group_service.GetGroupRequest.serialize, + response_deserializer=group.Group.deserialize, + ) + return self._stubs['get_group'] + + @property + def create_group(self) -> Callable[ + [group_service.CreateGroupRequest], + gm_group.Group]: + r"""Return a callable for the create group method over gRPC. + + Creates a new group. + + Returns: + Callable[[~.CreateGroupRequest], + ~.Group]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_group' not in self._stubs: + self._stubs['create_group'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/CreateGroup', + request_serializer=group_service.CreateGroupRequest.serialize, + response_deserializer=gm_group.Group.deserialize, + ) + return self._stubs['create_group'] + + @property + def update_group(self) -> Callable[ + [group_service.UpdateGroupRequest], + gm_group.Group]: + r"""Return a callable for the update group method over gRPC. + + Updates an existing group. You can change any group attributes + except ``name``. + + Returns: + Callable[[~.UpdateGroupRequest], + ~.Group]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_group' not in self._stubs: + self._stubs['update_group'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/UpdateGroup', + request_serializer=group_service.UpdateGroupRequest.serialize, + response_deserializer=gm_group.Group.deserialize, + ) + return self._stubs['update_group'] + + @property + def delete_group(self) -> Callable[ + [group_service.DeleteGroupRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete group method over gRPC. + + Deletes an existing group. + + Returns: + Callable[[~.DeleteGroupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_group' not in self._stubs: + self._stubs['delete_group'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/DeleteGroup', + request_serializer=group_service.DeleteGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_group'] + + @property + def list_group_members(self) -> Callable[ + [group_service.ListGroupMembersRequest], + group_service.ListGroupMembersResponse]: + r"""Return a callable for the list group members method over gRPC. + + Lists the monitored resources that are members of a + group. + + Returns: + Callable[[~.ListGroupMembersRequest], + ~.ListGroupMembersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_group_members' not in self._stubs: + self._stubs['list_group_members'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/ListGroupMembers', + request_serializer=group_service.ListGroupMembersRequest.serialize, + response_deserializer=group_service.ListGroupMembersResponse.deserialize, + ) + return self._stubs['list_group_members'] + + +__all__ = ( + 'GroupServiceGrpcTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..cf4b535e --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py @@ -0,0 +1,402 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
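
[Editorial note, not part of the patch] The synchronous gRPC transport just added is what ``GroupServiceClient`` builds by default, but it can also be constructed explicitly, for example to pin the host, and handed to the client. A minimal sketch under that assumption; the group resource name is a placeholder and Application Default Credentials are assumed:

    from google.cloud import monitoring_v3
    from google.cloud.monitoring_v3.services.group_service.transports import (
        GroupServiceGrpcTransport,
    )

    # Explicit transport construction; omitting this and letting the client pick
    # "grpc" is equivalent. Credentials fall back to Application Default Credentials.
    transport = GroupServiceGrpcTransport(host="monitoring.googleapis.com")
    client = monitoring_v3.GroupServiceClient(transport=transport)

    # Flattened argument: the ("name", ...) routing header is attached automatically.
    # "projects/example-project/groups/example-group" is a placeholder group name.
    members = client.list_group_members(
        name="projects/example-project/groups/example-group",
    )
    for resource in members:
        print(resource.type, dict(resource.labels))

Passing a pre-built ``channel`` to the transport instead would skip credential lookup entirely, since the constructor above ignores credentials whenever a channel is supplied.
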
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.monitoring_v3.types import group +from google.cloud.monitoring_v3.types import group as gm_group +from google.cloud.monitoring_v3.types import group_service +from google.protobuf import empty_pb2 # type: ignore +from .base import GroupServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import GroupServiceGrpcTransport + + +class GroupServiceGrpcAsyncIOTransport(GroupServiceTransport): + """gRPC AsyncIO backend transport for GroupService. + + The Group API lets you inspect and manage your + `groups <#google.monitoring.v3.Group>`__. + + A group is a named filter that is used to identify a collection of + monitored resources. Groups are typically used to mirror the + physical and/or logical topology of the environment. Because group + membership is computed dynamically, monitored resources that are + started in the future are automatically placed in matching groups. + By using a group to name monitored resources in, for example, an + alert policy, the target of that alert policy is updated + automatically as monitored resources are added and removed from the + infrastructure. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_groups(self) -> Callable[ + [group_service.ListGroupsRequest], + Awaitable[group_service.ListGroupsResponse]]: + r"""Return a callable for the list groups method over gRPC. + + Lists the existing groups. + + Returns: + Callable[[~.ListGroupsRequest], + Awaitable[~.ListGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_groups' not in self._stubs: + self._stubs['list_groups'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/ListGroups', + request_serializer=group_service.ListGroupsRequest.serialize, + response_deserializer=group_service.ListGroupsResponse.deserialize, + ) + return self._stubs['list_groups'] + + @property + def get_group(self) -> Callable[ + [group_service.GetGroupRequest], + Awaitable[group.Group]]: + r"""Return a callable for the get group method over gRPC. + + Gets a single group. + + Returns: + Callable[[~.GetGroupRequest], + Awaitable[~.Group]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_group' not in self._stubs: + self._stubs['get_group'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/GetGroup', + request_serializer=group_service.GetGroupRequest.serialize, + response_deserializer=group.Group.deserialize, + ) + return self._stubs['get_group'] + + @property + def create_group(self) -> Callable[ + [group_service.CreateGroupRequest], + Awaitable[gm_group.Group]]: + r"""Return a callable for the create group method over gRPC. + + Creates a new group. + + Returns: + Callable[[~.CreateGroupRequest], + Awaitable[~.Group]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_group' not in self._stubs: + self._stubs['create_group'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/CreateGroup', + request_serializer=group_service.CreateGroupRequest.serialize, + response_deserializer=gm_group.Group.deserialize, + ) + return self._stubs['create_group'] + + @property + def update_group(self) -> Callable[ + [group_service.UpdateGroupRequest], + Awaitable[gm_group.Group]]: + r"""Return a callable for the update group method over gRPC. + + Updates an existing group. You can change any group attributes + except ``name``. + + Returns: + Callable[[~.UpdateGroupRequest], + Awaitable[~.Group]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_group' not in self._stubs: + self._stubs['update_group'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/UpdateGroup', + request_serializer=group_service.UpdateGroupRequest.serialize, + response_deserializer=gm_group.Group.deserialize, + ) + return self._stubs['update_group'] + + @property + def delete_group(self) -> Callable[ + [group_service.DeleteGroupRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete group method over gRPC. + + Deletes an existing group. + + Returns: + Callable[[~.DeleteGroupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_group' not in self._stubs: + self._stubs['delete_group'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/DeleteGroup', + request_serializer=group_service.DeleteGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_group'] + + @property + def list_group_members(self) -> Callable[ + [group_service.ListGroupMembersRequest], + Awaitable[group_service.ListGroupMembersResponse]]: + r"""Return a callable for the list group members method over gRPC. + + Lists the monitored resources that are members of a + group. 
+ + Returns: + Callable[[~.ListGroupMembersRequest], + Awaitable[~.ListGroupMembersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_group_members' not in self._stubs: + self._stubs['list_group_members'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.GroupService/ListGroupMembers', + request_serializer=group_service.ListGroupMembersRequest.serialize, + response_deserializer=group_service.ListGroupMembersResponse.deserialize, + ) + return self._stubs['list_group_members'] + + +__all__ = ( + 'GroupServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/__init__.py new file mode 100644 index 00000000..836589ba --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import MetricServiceClient +from .async_client import MetricServiceAsyncClient + +__all__ = ( + 'MetricServiceClient', + 'MetricServiceAsyncClient', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/async_client.py new file mode 100644 index 00000000..6ac8f37b --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/async_client.py @@ -0,0 +1,967 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
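
[Editorial note, not part of the patch] The asyncio transport mirrors the synchronous GroupService transport method for method, so the corresponding async client (``GroupServiceAsyncClient``, assuming it is re-exported from ``google.cloud.monitoring_v3`` like the other generated clients) takes the same flattened arguments and returns the async pagers defined earlier. A minimal sketch with a placeholder project ID:

    import asyncio

    from google.cloud import monitoring_v3


    async def show_groups(project_id: str) -> None:
        # Defaults to the grpc_asyncio transport added above.
        client = monitoring_v3.GroupServiceAsyncClient()

        # "name" is the flattened project field of ListGroupsRequest.
        pager = await client.list_groups(name=f"projects/{project_id}")

        # ListGroupsAsyncPager resolves additional pages during iteration.
        async for group in pager:
            print(group.name)


    # "example-project" is a placeholder project ID.
    asyncio.run(show_groups("example-project"))
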
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.services.metric_service import pagers +from google.cloud.monitoring_v3.types import common +from google.cloud.monitoring_v3.types import metric as gm_metric +from google.cloud.monitoring_v3.types import metric_service +from .transports.base import MetricServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MetricServiceGrpcAsyncIOTransport +from .client import MetricServiceClient + + +class MetricServiceAsyncClient: + """Manages metric descriptors, monitored resource descriptors, + and time series data. + """ + + _client: MetricServiceClient + + DEFAULT_ENDPOINT = MetricServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MetricServiceClient.DEFAULT_MTLS_ENDPOINT + + metric_descriptor_path = staticmethod(MetricServiceClient.metric_descriptor_path) + parse_metric_descriptor_path = staticmethod(MetricServiceClient.parse_metric_descriptor_path) + monitored_resource_descriptor_path = staticmethod(MetricServiceClient.monitored_resource_descriptor_path) + parse_monitored_resource_descriptor_path = staticmethod(MetricServiceClient.parse_monitored_resource_descriptor_path) + time_series_path = staticmethod(MetricServiceClient.time_series_path) + parse_time_series_path = staticmethod(MetricServiceClient.parse_time_series_path) + common_billing_account_path = staticmethod(MetricServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(MetricServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(MetricServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(MetricServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(MetricServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(MetricServiceClient.parse_common_organization_path) + common_project_path = staticmethod(MetricServiceClient.common_project_path) + parse_common_project_path = staticmethod(MetricServiceClient.parse_common_project_path) + common_location_path = staticmethod(MetricServiceClient.common_location_path) + parse_common_location_path = staticmethod(MetricServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricServiceAsyncClient: The constructed client. 
+ """ + return MetricServiceClient.from_service_account_info.__func__(MetricServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricServiceAsyncClient: The constructed client. + """ + return MetricServiceClient.from_service_account_file.__func__(MetricServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetricServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MetricServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(MetricServiceClient).get_transport_class, type(MetricServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, MetricServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metric service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MetricServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MetricServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_monitored_resource_descriptors(self, + request: metric_service.ListMonitoredResourceDescriptorsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: + r"""Lists monitored resource descriptors that match a + filter. 
This method does not require a Workspace. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest`): + The request object. The + `ListMonitoredResourceDescriptors` request. + name (:class:`str`): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.metric_service.pagers.ListMonitoredResourceDescriptorsAsyncPager: + The ListMonitoredResourceDescriptors response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = metric_service.ListMonitoredResourceDescriptorsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_monitored_resource_descriptors, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_monitored_resource_descriptor(self, + request: metric_service.GetMonitoredResourceDescriptorRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> monitored_resource_pb2.MonitoredResourceDescriptor: + r"""Gets a single monitored resource descriptor. This + method does not require a Workspace. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetMonitoredResourceDescriptorRequest`): + The request object. The `GetMonitoredResourceDescriptor` + request. + name (:class:`str`): + Required. The monitored resource descriptor to get. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/monitoredResourceDescriptors/[RESOURCE_TYPE] + + The ``[RESOURCE_TYPE]`` is a predefined type, such as + ``cloudsql_database``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.monitored_resource_pb2.MonitoredResourceDescriptor: + An object that describes the schema of a [MonitoredResource][google.api.MonitoredResource] object using a + type name and a set of labels. For example, the + monitored resource descriptor for Google Compute + Engine VM instances has a type of "gce_instance" and + specifies the use of the labels "instance_id" and + "zone" to identify particular VM instances. + + Different APIs can support different monitored + resource types. APIs generally provide a list method + that returns the monitored resource descriptors used + by the API. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = metric_service.GetMonitoredResourceDescriptorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_monitored_resource_descriptor, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_metric_descriptors(self, + request: metric_service.ListMetricDescriptorsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetricDescriptorsAsyncPager: + r"""Lists metric descriptors that match a filter. This + method does not require a Workspace. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest`): + The request object. The `ListMetricDescriptors` request. + name (:class:`str`): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.monitoring_v3.services.metric_service.pagers.ListMetricDescriptorsAsyncPager: + The ListMetricDescriptors response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = metric_service.ListMetricDescriptorsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_metric_descriptors, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMetricDescriptorsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_metric_descriptor(self, + request: metric_service.GetMetricDescriptorRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metric_pb2.MetricDescriptor: + r"""Gets a single metric descriptor. This method does not + require a Workspace. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetMetricDescriptorRequest`): + The request object. The `GetMetricDescriptor` request. + name (:class:`str`): + Required. The metric descriptor on which to execute the + request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] + + An example value of ``[METRIC_ID]`` is + ``"compute.googleapis.com/instance/disk/read_bytes_count"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.metric_pb2.MetricDescriptor: + Defines a metric type and its schema. + Once a metric descriptor is created, + deleting or altering it stops data + collection and makes the metric type's + existing data unusable. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
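# NOTE (illustrative only): every RPC method on this client accepts either a
# fully-formed request object or the flattened field arguments, never both;
# the check just below raises ValueError otherwise. A sketch of the two
# supported call styles, assuming an async client instance and a descriptor name:
#
#     from google.cloud import monitoring_v3
#
#     # Flattened argument:
#     descriptor = await client.get_metric_descriptor(name=descriptor_name)
#
#     # Equivalent full request object:
#     request = monitoring_v3.GetMetricDescriptorRequest(name=descriptor_name)
#     descriptor = await client.get_metric_descriptor(request=request)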
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = metric_service.GetMetricDescriptorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_metric_descriptor, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_metric_descriptor(self, + request: metric_service.CreateMetricDescriptorRequest = None, + *, + name: str = None, + metric_descriptor: metric_pb2.MetricDescriptor = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metric_pb2.MetricDescriptor: + r"""Creates a new metric descriptor. User-created metric descriptors + define `custom + metrics `__. + + Args: + request (:class:`google.cloud.monitoring_v3.types.CreateMetricDescriptorRequest`): + The request object. The `CreateMetricDescriptor` + request. + name (:class:`str`): + Required. The + `project `__ + on which to execute the request. The format is: 4 + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric_descriptor (:class:`google.api.metric_pb2.MetricDescriptor`): + Required. The new `custom + metric `__ + descriptor. + + This corresponds to the ``metric_descriptor`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.metric_pb2.MetricDescriptor: + Defines a metric type and its schema. + Once a metric descriptor is created, + deleting or altering it stops data + collection and makes the metric type's + existing data unusable. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, metric_descriptor]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = metric_service.CreateMetricDescriptorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
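# NOTE (illustrative only): a minimal sketch of creating a user-defined
# ("custom") metric descriptor with this async client, assuming default
# credentials and a hypothetical PROJECT_ID:
#
#     from google.api import metric_pb2
#     from google.cloud import monitoring_v3
#
#     client = monitoring_v3.MetricServiceAsyncClient()
#     descriptor = metric_pb2.MetricDescriptor(
#         type="custom.googleapis.com/store/daily_sales",
#         metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE,
#         value_type=metric_pb2.MetricDescriptor.ValueType.DOUBLE,
#         description="Daily sales total.",
#     )
#     created = await client.create_metric_descriptor(
#         name=f"projects/{PROJECT_ID}",
#         metric_descriptor=descriptor,
#     )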
+ if name is not None: + request.name = name + if metric_descriptor is not None: + request.metric_descriptor = metric_descriptor + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_metric_descriptor, + default_timeout=12.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_metric_descriptor(self, + request: metric_service.DeleteMetricDescriptorRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a metric descriptor. Only user-created `custom + metrics `__ + can be deleted. + + Args: + request (:class:`google.cloud.monitoring_v3.types.DeleteMetricDescriptorRequest`): + The request object. The `DeleteMetricDescriptor` + request. + name (:class:`str`): + Required. The metric descriptor on which to execute the + request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] + + An example of ``[METRIC_ID]`` is: + ``"custom.googleapis.com/my_test_metric"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = metric_service.DeleteMetricDescriptorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_metric_descriptor, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
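# NOTE (illustrative only): the wrap_method calls above install a default
# policy that retries ServiceUnavailable errors with exponential backoff
# (0.1 s initial delay, 1.3x multiplier, 30 s cap) up to a 30 s deadline, with
# a 30 s default timeout. Callers can override this per call; a sketch,
# assuming a client instance and a descriptor name:
#
#     from google.api_core import retry as retries
#
#     await client.delete_metric_descriptor(
#         name=descriptor_name,
#         retry=retries.Retry(initial=0.5, maximum=10.0, multiplier=2.0, deadline=60.0),
#         timeout=60.0,
#     )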
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_time_series(self, + request: metric_service.ListTimeSeriesRequest = None, + *, + name: str = None, + filter: str = None, + interval: common.TimeInterval = None, + view: metric_service.ListTimeSeriesRequest.TimeSeriesView = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTimeSeriesAsyncPager: + r"""Lists time series that match a filter. This method + does not require a Workspace. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListTimeSeriesRequest`): + The request object. The `ListTimeSeries` request. + name (:class:`str`): + Required. The + `project `__, + organization or folder on which to execute the request. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + organizations/[ORGANIZATION_ID] + folders/[FOLDER_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Required. A `monitoring + filter `__ + that specifies which time series should be returned. The + filter must specify a single metric type, and can + additionally specify metric labels and other + information. For example: + + :: + + metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND + metric.labels.instance_name = "my-instance-name" + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interval (:class:`google.cloud.monitoring_v3.types.TimeInterval`): + Required. The time interval for which + results should be returned. Only time + series that contain data points in the + specified interval are included in the + response. + + This corresponds to the ``interval`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + view (:class:`google.cloud.monitoring_v3.types.ListTimeSeriesRequest.TimeSeriesView`): + Required. Specifies which information + is returned about the time series. + + This corresponds to the ``view`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.metric_service.pagers.ListTimeSeriesAsyncPager: + The ListTimeSeries response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, filter, interval, view]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = metric_service.ListTimeSeriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
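# NOTE (illustrative only): a sketch of querying time series with the
# flattened arguments documented above, assuming a hypothetical PROJECT_ID
# and a one-hour window ending now:
#
#     import time
#     from google.cloud import monitoring_v3
#
#     client = monitoring_v3.MetricServiceAsyncClient()
#     now = int(time.time())
#     interval = monitoring_v3.TimeInterval(
#         {"start_time": {"seconds": now - 3600}, "end_time": {"seconds": now}})
#     pager = await client.list_time_series(
#         name=f"projects/{PROJECT_ID}",
#         filter='metric.type = "compute.googleapis.com/instance/cpu/usage_time"',
#         interval=interval,
#         view=monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL,
#     )
#     async for series in pager:
#         print(series.metric.type, len(series.points))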
+ if name is not None: + request.name = name + if filter is not None: + request.filter = filter + if interval is not None: + request.interval = interval + if view is not None: + request.view = view + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_time_series, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=90.0, + ), + default_timeout=90.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTimeSeriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_time_series(self, + request: metric_service.CreateTimeSeriesRequest = None, + *, + name: str = None, + time_series: Sequence[gm_metric.TimeSeries] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Creates or adds data to one or more time series. + The response is empty if all time series in the request + were written. If any time series could not be written, a + corresponding failure message is included in the error + response. + + Args: + request (:class:`google.cloud.monitoring_v3.types.CreateTimeSeriesRequest`): + The request object. The `CreateTimeSeries` request. + name (:class:`str`): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + time_series (:class:`Sequence[google.cloud.monitoring_v3.types.TimeSeries]`): + Required. The new data to be added to a list of time + series. Adds at most one data point to each of several + time series. The new data point must be more recent than + any other point in its time series. Each ``TimeSeries`` + value must fully specify a unique time series by + supplying all label values for the metric and the + monitored resource. + + The maximum number of ``TimeSeries`` objects per + ``Create`` request is 200. + + This corresponds to the ``time_series`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
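# NOTE (illustrative only): a sketch of writing a single data point, assuming
# a hypothetical PROJECT_ID and an existing custom metric type:
#
#     import time
#     from google.cloud import monitoring_v3
#
#     client = monitoring_v3.MetricServiceAsyncClient()
#     series = monitoring_v3.TimeSeries()
#     series.metric.type = "custom.googleapis.com/store/daily_sales"
#     series.resource.type = "global"
#     series.resource.labels["project_id"] = PROJECT_ID
#     now = int(time.time())
#     point = monitoring_v3.Point(
#         {"interval": {"end_time": {"seconds": now}},
#          "value": {"double_value": 123.45}})
#     series.points = [point]
#     await client.create_time_series(
#         name=f"projects/{PROJECT_ID}", time_series=[series])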
+ has_flattened_params = any([name, time_series]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = metric_service.CreateTimeSeriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if time_series: + request.time_series.extend(time_series) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_time_series, + default_timeout=12.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "MetricServiceAsyncClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/client.py new file mode 100644 index 00000000..07a79178 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/client.py @@ -0,0 +1,1140 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
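# NOTE (illustrative only, not part of the generated files): the synchronous
# client defined below mirrors the async client above. A construction sketch,
# assuming a service-account key file at a hypothetical path and an optional
# endpoint override via client options:
#
#     from google.api_core.client_options import ClientOptions
#     from google.cloud import monitoring_v3
#
#     # Default credentials picked up from the environment:
#     client = monitoring_v3.MetricServiceClient()
#
#     # Or explicit service-account credentials and a custom endpoint:
#     client = monitoring_v3.MetricServiceClient.from_service_account_file(
#         "/path/to/key.json",
#         client_options=ClientOptions(api_endpoint="monitoring.googleapis.com"),
#     )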
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.api import label_pb2  # type: ignore
+from google.api import launch_stage_pb2  # type: ignore
+from google.api import metric_pb2  # type: ignore
+from google.api import monitored_resource_pb2  # type: ignore
+from google.cloud.monitoring_v3.services.metric_service import pagers
+from google.cloud.monitoring_v3.types import common
+from google.cloud.monitoring_v3.types import metric as gm_metric
+from google.cloud.monitoring_v3.types import metric_service
+from .transports.base import MetricServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import MetricServiceGrpcTransport
+from .transports.grpc_asyncio import MetricServiceGrpcAsyncIOTransport
+
+
+class MetricServiceClientMeta(type):
+    """Metaclass for the MetricService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[MetricServiceTransport]]
+    _transport_registry["grpc"] = MetricServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = MetricServiceGrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: str = None,
+        ) -> Type[MetricServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class MetricServiceClient(metaclass=MetricServiceClientMeta):
+    """Manages metric descriptors, monitored resource descriptors,
+    and time series data.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "monitoring.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetricServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MetricServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def metric_descriptor_path(project: str,) -> str:
+        """Returns a fully-qualified metric_descriptor string."""
+        return "projects/{project}/metricDescriptors/{metric_descriptor=**}".format(project=project, )
+
+    @staticmethod
+    def parse_metric_descriptor_path(path: str) -> Dict[str,str]:
+        """Parses a metric_descriptor path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/metricDescriptors/{metric_descriptor=**}$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def monitored_resource_descriptor_path(project: str,monitored_resource_descriptor: str,) -> str:
+        """Returns a fully-qualified monitored_resource_descriptor string."""
+        return "projects/{project}/monitoredResourceDescriptors/{monitored_resource_descriptor}".format(project=project, monitored_resource_descriptor=monitored_resource_descriptor, )
+
+    @staticmethod
+    def parse_monitored_resource_descriptor_path(path: str) -> Dict[str,str]:
+        """Parses a monitored_resource_descriptor path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/monitoredResourceDescriptors/(?P<monitored_resource_descriptor>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def time_series_path(project: str,time_series: str,) -> str:
+        """Returns a fully-qualified time_series string."""
+        return "projects/{project}/timeSeries/{time_series}".format(project=project, time_series=time_series, )
+
+    @staticmethod
+    def parse_time_series_path(path: str) -> Dict[str,str]:
+        """Parses a time_series path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/timeSeries/(?P<time_series>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return
"projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, MetricServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metric service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MetricServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MetricServiceTransport): + # transport is a MetricServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_monitored_resource_descriptors(self, + request: metric_service.ListMonitoredResourceDescriptorsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsPager: + r"""Lists monitored resource descriptors that match a + filter. This method does not require a Workspace. + + Args: + request (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest): + The request object. The + `ListMonitoredResourceDescriptors` request. + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.metric_service.pagers.ListMonitoredResourceDescriptorsPager: + The ListMonitoredResourceDescriptors response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
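# NOTE (illustrative only): a sketch of the synchronous paged call documented
# above, assuming a hypothetical PROJECT_ID; the returned pager fetches
# additional pages transparently as it is iterated:
#
#     from google.cloud import monitoring_v3
#
#     client = monitoring_v3.MetricServiceClient()
#     for descriptor in client.list_monitored_resource_descriptors(
#             name=f"projects/{PROJECT_ID}"):
#         print(descriptor.type)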
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a metric_service.ListMonitoredResourceDescriptorsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metric_service.ListMonitoredResourceDescriptorsRequest): + request = metric_service.ListMonitoredResourceDescriptorsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_monitored_resource_descriptors] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_monitored_resource_descriptor(self, + request: metric_service.GetMonitoredResourceDescriptorRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> monitored_resource_pb2.MonitoredResourceDescriptor: + r"""Gets a single monitored resource descriptor. This + method does not require a Workspace. + + Args: + request (google.cloud.monitoring_v3.types.GetMonitoredResourceDescriptorRequest): + The request object. The `GetMonitoredResourceDescriptor` + request. + name (str): + Required. The monitored resource descriptor to get. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/monitoredResourceDescriptors/[RESOURCE_TYPE] + + The ``[RESOURCE_TYPE]`` is a predefined type, such as + ``cloudsql_database``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.monitored_resource_pb2.MonitoredResourceDescriptor: + An object that describes the schema of a [MonitoredResource][google.api.MonitoredResource] object using a + type name and a set of labels. For example, the + monitored resource descriptor for Google Compute + Engine VM instances has a type of "gce_instance" and + specifies the use of the labels "instance_id" and + "zone" to identify particular VM instances. + + Different APIs can support different monitored + resource types. APIs generally provide a list method + that returns the monitored resource descriptors used + by the API. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a metric_service.GetMonitoredResourceDescriptorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metric_service.GetMonitoredResourceDescriptorRequest): + request = metric_service.GetMonitoredResourceDescriptorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_monitored_resource_descriptor] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_metric_descriptors(self, + request: metric_service.ListMetricDescriptorsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetricDescriptorsPager: + r"""Lists metric descriptors that match a filter. This + method does not require a Workspace. + + Args: + request (google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest): + The request object. The `ListMetricDescriptors` request. + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.metric_service.pagers.ListMetricDescriptorsPager: + The ListMetricDescriptors response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a metric_service.ListMetricDescriptorsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metric_service.ListMetricDescriptorsRequest): + request = metric_service.ListMetricDescriptorsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_metric_descriptors] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMetricDescriptorsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_metric_descriptor(self, + request: metric_service.GetMetricDescriptorRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metric_pb2.MetricDescriptor: + r"""Gets a single metric descriptor. This method does not + require a Workspace. + + Args: + request (google.cloud.monitoring_v3.types.GetMetricDescriptorRequest): + The request object. The `GetMetricDescriptor` request. + name (str): + Required. The metric descriptor on which to execute the + request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] + + An example value of ``[METRIC_ID]`` is + ``"compute.googleapis.com/instance/disk/read_bytes_count"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.metric_pb2.MetricDescriptor: + Defines a metric type and its schema. + Once a metric descriptor is created, + deleting or altering it stops data + collection and makes the metric type's + existing data unusable. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a metric_service.GetMetricDescriptorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metric_service.GetMetricDescriptorRequest): + request = metric_service.GetMetricDescriptorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_metric_descriptor] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_metric_descriptor(self, + request: metric_service.CreateMetricDescriptorRequest = None, + *, + name: str = None, + metric_descriptor: metric_pb2.MetricDescriptor = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metric_pb2.MetricDescriptor: + r"""Creates a new metric descriptor. User-created metric descriptors + define `custom + metrics `__. + + Args: + request (google.cloud.monitoring_v3.types.CreateMetricDescriptorRequest): + The request object. The `CreateMetricDescriptor` + request. + name (str): + Required. The + `project `__ + on which to execute the request. The format is: 4 + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric_descriptor (google.api.metric_pb2.MetricDescriptor): + Required. The new `custom + metric `__ + descriptor. + + This corresponds to the ``metric_descriptor`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.metric_pb2.MetricDescriptor: + Defines a metric type and its schema. + Once a metric descriptor is created, + deleting or altering it stops data + collection and makes the metric type's + existing data unusable. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, metric_descriptor]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a metric_service.CreateMetricDescriptorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metric_service.CreateMetricDescriptorRequest): + request = metric_service.CreateMetricDescriptorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if metric_descriptor is not None: + request.metric_descriptor = metric_descriptor + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_metric_descriptor] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_metric_descriptor(self, + request: metric_service.DeleteMetricDescriptorRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a metric descriptor. Only user-created `custom + metrics `__ + can be deleted. + + Args: + request (google.cloud.monitoring_v3.types.DeleteMetricDescriptorRequest): + The request object. The `DeleteMetricDescriptor` + request. + name (str): + Required. The metric descriptor on which to execute the + request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] + + An example of ``[METRIC_ID]`` is: + ``"custom.googleapis.com/my_test_metric"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a metric_service.DeleteMetricDescriptorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metric_service.DeleteMetricDescriptorRequest): + request = metric_service.DeleteMetricDescriptorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_metric_descriptor] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_time_series(self, + request: metric_service.ListTimeSeriesRequest = None, + *, + name: str = None, + filter: str = None, + interval: common.TimeInterval = None, + view: metric_service.ListTimeSeriesRequest.TimeSeriesView = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTimeSeriesPager: + r"""Lists time series that match a filter. This method + does not require a Workspace. + + Args: + request (google.cloud.monitoring_v3.types.ListTimeSeriesRequest): + The request object. The `ListTimeSeries` request. + name (str): + Required. The + `project `__, + organization or folder on which to execute the request. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + organizations/[ORGANIZATION_ID] + folders/[FOLDER_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + Required. 
A `monitoring + filter `__ + that specifies which time series should be returned. The + filter must specify a single metric type, and can + additionally specify metric labels and other + information. For example: + + :: + + metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND + metric.labels.instance_name = "my-instance-name" + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interval (google.cloud.monitoring_v3.types.TimeInterval): + Required. The time interval for which + results should be returned. Only time + series that contain data points in the + specified interval are included in the + response. + + This corresponds to the ``interval`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + view (google.cloud.monitoring_v3.types.ListTimeSeriesRequest.TimeSeriesView): + Required. Specifies which information + is returned about the time series. + + This corresponds to the ``view`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.metric_service.pagers.ListTimeSeriesPager: + The ListTimeSeries response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, filter, interval, view]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a metric_service.ListTimeSeriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metric_service.ListTimeSeriesRequest): + request = metric_service.ListTimeSeriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if filter is not None: + request.filter = filter + if interval is not None: + request.interval = interval + if view is not None: + request.view = view + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_time_series] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTimeSeriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
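+        # For reference, a typical flattened call to this method from user
+        # code looks like the hedged sketch below (the client, project ID,
+        # and filter are placeholders; the interval ends at the current time):
+        #
+        #     import time
+        #     from google.cloud import monitoring_v3
+        #
+        #     now = int(time.time())
+        #     interval = monitoring_v3.TimeInterval(
+        #         {"end_time": {"seconds": now},
+        #          "start_time": {"seconds": now - 1200}}
+        #     )
+        #     pager = client.list_time_series(
+        #         name="projects/my-project",
+        #         filter='metric.type = "compute.googleapis.com/instance/cpu/usage_time"',
+        #         interval=interval,
+        #         view=monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL,
+        #     )
+        #     for series in pager:
+        #         print(series.metric.type)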
+ return response + + def create_time_series(self, + request: metric_service.CreateTimeSeriesRequest = None, + *, + name: str = None, + time_series: Sequence[gm_metric.TimeSeries] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Creates or adds data to one or more time series. + The response is empty if all time series in the request + were written. If any time series could not be written, a + corresponding failure message is included in the error + response. + + Args: + request (google.cloud.monitoring_v3.types.CreateTimeSeriesRequest): + The request object. The `CreateTimeSeries` request. + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): + Required. The new data to be added to a list of time + series. Adds at most one data point to each of several + time series. The new data point must be more recent than + any other point in its time series. Each ``TimeSeries`` + value must fully specify a unique time series by + supplying all label values for the metric and the + monitored resource. + + The maximum number of ``TimeSeries`` objects per + ``Create`` request is 200. + + This corresponds to the ``time_series`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, time_series]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a metric_service.CreateTimeSeriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metric_service.CreateTimeSeriesRequest): + request = metric_service.CreateTimeSeriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if time_series is not None: + request.time_series = time_series + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_time_series] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
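+        # For reference, a typical flattened call to this method from user
+        # code looks like the hedged sketch below (the client, project ID,
+        # and the custom metric being written are placeholders):
+        #
+        #     import time
+        #     from google.cloud import monitoring_v3
+        #
+        #     series = monitoring_v3.TimeSeries()
+        #     series.metric.type = "custom.googleapis.com/my_test_metric"
+        #     series.resource.type = "global"
+        #     point = monitoring_v3.Point(
+        #         {"interval": {"end_time": {"seconds": int(time.time())}},
+        #          "value": {"double_value": 3.14}}
+        #     )
+        #     series.points = [point]
+        #     client.create_time_series(
+        #         name="projects/my-project",
+        #         time_series=[series],
+        #     )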
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "MetricServiceClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/pagers.py new file mode 100644 index 00000000..fee4ce44 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/pagers.py @@ -0,0 +1,387 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.types import metric as gm_metric +from google.cloud.monitoring_v3.types import metric_service + + +class ListMonitoredResourceDescriptorsPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``resource_descriptors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMonitoredResourceDescriptors`` requests and continue to iterate + through the ``resource_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., metric_service.ListMonitoredResourceDescriptorsResponse], + request: metric_service.ListMonitoredResourceDescriptorsRequest, + response: metric_service.ListMonitoredResourceDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
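+
+        For example, iterating a pager returned by the client (a minimal
+        sketch; ``client`` and the project ID are placeholders)::
+
+            pager = client.list_monitored_resource_descriptors(
+                name="projects/my-project",
+            )
+            for descriptor in pager:
+                print(descriptor.type)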
+ """ + self._method = method + self._request = metric_service.ListMonitoredResourceDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[metric_service.ListMonitoredResourceDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + for page in self.pages: + yield from page.resource_descriptors + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMonitoredResourceDescriptorsAsyncPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``resource_descriptors`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMonitoredResourceDescriptors`` requests and continue to iterate + through the ``resource_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[metric_service.ListMonitoredResourceDescriptorsResponse]], + request: metric_service.ListMonitoredResourceDescriptorsRequest, + response: metric_service.ListMonitoredResourceDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metric_service.ListMonitoredResourceDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[metric_service.ListMonitoredResourceDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + async def async_generator(): + async for page in self.pages: + for response in page.resource_descriptors: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMetricDescriptorsPager: + """A pager for iterating through ``list_metric_descriptors`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``metric_descriptors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMetricDescriptors`` requests and continue to iterate + through the ``metric_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., metric_service.ListMetricDescriptorsResponse], + request: metric_service.ListMetricDescriptorsRequest, + response: metric_service.ListMetricDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metric_service.ListMetricDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[metric_service.ListMetricDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[metric_pb2.MetricDescriptor]: + for page in self.pages: + yield from page.metric_descriptors + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMetricDescriptorsAsyncPager: + """A pager for iterating through ``list_metric_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``metric_descriptors`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMetricDescriptors`` requests and continue to iterate + through the ``metric_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[metric_service.ListMetricDescriptorsResponse]], + request: metric_service.ListMetricDescriptorsRequest, + response: metric_service.ListMetricDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest): + The initial request object. 
+ response (google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metric_service.ListMetricDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[metric_service.ListMetricDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[metric_pb2.MetricDescriptor]: + async def async_generator(): + async for page in self.pages: + for response in page.metric_descriptors: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTimeSeriesPager: + """A pager for iterating through ``list_time_series`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListTimeSeriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``time_series`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTimeSeries`` requests and continue to iterate + through the ``time_series`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListTimeSeriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., metric_service.ListTimeSeriesResponse], + request: metric_service.ListTimeSeriesRequest, + response: metric_service.ListTimeSeriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListTimeSeriesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListTimeSeriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metric_service.ListTimeSeriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[metric_service.ListTimeSeriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[gm_metric.TimeSeries]: + for page in self.pages: + yield from page.time_series + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTimeSeriesAsyncPager: + """A pager for iterating through ``list_time_series`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListTimeSeriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``time_series`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTimeSeries`` requests and continue to iterate + through the ``time_series`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListTimeSeriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[metric_service.ListTimeSeriesResponse]], + request: metric_service.ListTimeSeriesRequest, + response: metric_service.ListTimeSeriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListTimeSeriesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListTimeSeriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metric_service.ListTimeSeriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[metric_service.ListTimeSeriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[gm_metric.TimeSeries]: + async def async_generator(): + async for page in self.pages: + for response in page.time_series: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/__init__.py new file mode 100644 index 00000000..bbb43274 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MetricServiceTransport +from .grpc import MetricServiceGrpcTransport +from .grpc_asyncio import MetricServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
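+# The keys ("grpc" and "grpc_asyncio") match the transport names accepted by
+# the client's ``transport`` argument. A hedged sketch of using this registry
+# directly:
+#
+#     transport_cls = _transport_registry["grpc"]
+#     transport = transport_cls()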
+_transport_registry = OrderedDict() # type: Dict[str, Type[MetricServiceTransport]] +_transport_registry['grpc'] = MetricServiceGrpcTransport +_transport_registry['grpc_asyncio'] = MetricServiceGrpcAsyncIOTransport + +__all__ = ( + 'MetricServiceTransport', + 'MetricServiceGrpcTransport', + 'MetricServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/base.py new file mode 100644 index 00000000..2c487de9 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/base.py @@ -0,0 +1,308 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.types import metric_service +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-monitoring', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class MetricServiceTransport(abc.ABC): + """Abstract transport class for MetricService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + 'https://www.googleapis.com/auth/monitoring.write', + ) + + DEFAULT_HOST: str = 'monitoring.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
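+        # Each entry below pairs an RPC with the default retry policy and
+        # timeout used when the caller leaves ``retry`` and ``timeout`` at
+        # ``gapic_v1.method.DEFAULT``. Callers may still override these per
+        # call; a hedged sketch ("client" is an assumed MetricServiceClient):
+        #
+        #     from google.api_core import retry as retries
+        #
+        #     client.list_time_series(
+        #         request=request,
+        #         retry=retries.Retry(deadline=120.0),
+        #         timeout=120.0,
+        #     )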
+ self._wrapped_methods = { + self.list_monitored_resource_descriptors: gapic_v1.method.wrap_method( + self.list_monitored_resource_descriptors, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.get_monitored_resource_descriptor: gapic_v1.method.wrap_method( + self.get_monitored_resource_descriptor, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_metric_descriptors: gapic_v1.method.wrap_method( + self.list_metric_descriptors, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.get_metric_descriptor: gapic_v1.method.wrap_method( + self.get_metric_descriptor, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.create_metric_descriptor: gapic_v1.method.wrap_method( + self.create_metric_descriptor, + default_timeout=12.0, + client_info=client_info, + ), + self.delete_metric_descriptor: gapic_v1.method.wrap_method( + self.delete_metric_descriptor, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_time_series: gapic_v1.method.wrap_method( + self.list_time_series, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=90.0, + ), + default_timeout=90.0, + client_info=client_info, + ), + self.create_time_series: gapic_v1.method.wrap_method( + self.create_time_series, + default_timeout=12.0, + client_info=client_info, + ), + } + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [metric_service.ListMonitoredResourceDescriptorsRequest], + Union[ + metric_service.ListMonitoredResourceDescriptorsResponse, + Awaitable[metric_service.ListMonitoredResourceDescriptorsResponse] + ]]: + raise NotImplementedError() + + @property + def get_monitored_resource_descriptor(self) -> Callable[ + [metric_service.GetMonitoredResourceDescriptorRequest], + Union[ + monitored_resource_pb2.MonitoredResourceDescriptor, + Awaitable[monitored_resource_pb2.MonitoredResourceDescriptor] + ]]: + raise NotImplementedError() + + @property + def list_metric_descriptors(self) -> Callable[ + [metric_service.ListMetricDescriptorsRequest], + Union[ + metric_service.ListMetricDescriptorsResponse, + Awaitable[metric_service.ListMetricDescriptorsResponse] + ]]: + raise NotImplementedError() + + @property + def get_metric_descriptor(self) -> Callable[ + [metric_service.GetMetricDescriptorRequest], + Union[ + metric_pb2.MetricDescriptor, + Awaitable[metric_pb2.MetricDescriptor] + ]]: + raise NotImplementedError() + + @property + def create_metric_descriptor(self) -> Callable[ + [metric_service.CreateMetricDescriptorRequest], + Union[ + metric_pb2.MetricDescriptor, + 
Awaitable[metric_pb2.MetricDescriptor] + ]]: + raise NotImplementedError() + + @property + def delete_metric_descriptor(self) -> Callable[ + [metric_service.DeleteMetricDescriptorRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_time_series(self) -> Callable[ + [metric_service.ListTimeSeriesRequest], + Union[ + metric_service.ListTimeSeriesResponse, + Awaitable[metric_service.ListTimeSeriesResponse] + ]]: + raise NotImplementedError() + + @property + def create_time_series(self) -> Callable[ + [metric_service.CreateTimeSeriesRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'MetricServiceTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py new file mode 100644 index 00000000..20143d19 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py @@ -0,0 +1,453 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.types import metric_service +from google.protobuf import empty_pb2 # type: ignore +from .base import MetricServiceTransport, DEFAULT_CLIENT_INFO + + +class MetricServiceGrpcTransport(MetricServiceTransport): + """gRPC backend transport for MetricService. + + Manages metric descriptors, monitored resource descriptors, + and time series data. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
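+
+    Most users never construct this class directly; the client builds it from
+    its own arguments. For completeness, a hedged construction sketch (assumes
+    application default credentials are available)::
+
+        from google.cloud.monitoring_v3 import MetricServiceClient
+        from google.cloud.monitoring_v3.services.metric_service.transports import (
+            MetricServiceGrpcTransport,
+        )
+
+        transport = MetricServiceGrpcTransport()
+        client = MetricServiceClient(transport=transport)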
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
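+
+        The channel is either the one supplied via ``channel`` at construction
+        time or the one this transport created from its credentials. A hedged
+        sketch of reusing it::
+
+            channel = transport.grpc_channel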
+ """ + return self._grpc_channel + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [metric_service.ListMonitoredResourceDescriptorsRequest], + metric_service.ListMonitoredResourceDescriptorsResponse]: + r"""Return a callable for the list monitored resource + descriptors method over gRPC. + + Lists monitored resource descriptors that match a + filter. This method does not require a Workspace. + + Returns: + Callable[[~.ListMonitoredResourceDescriptorsRequest], + ~.ListMonitoredResourceDescriptorsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_monitored_resource_descriptors' not in self._stubs: + self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/ListMonitoredResourceDescriptors', + request_serializer=metric_service.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=metric_service.ListMonitoredResourceDescriptorsResponse.deserialize, + ) + return self._stubs['list_monitored_resource_descriptors'] + + @property + def get_monitored_resource_descriptor(self) -> Callable[ + [metric_service.GetMonitoredResourceDescriptorRequest], + monitored_resource_pb2.MonitoredResourceDescriptor]: + r"""Return a callable for the get monitored resource + descriptor method over gRPC. + + Gets a single monitored resource descriptor. This + method does not require a Workspace. + + Returns: + Callable[[~.GetMonitoredResourceDescriptorRequest], + ~.MonitoredResourceDescriptor]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_monitored_resource_descriptor' not in self._stubs: + self._stubs['get_monitored_resource_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/GetMonitoredResourceDescriptor', + request_serializer=metric_service.GetMonitoredResourceDescriptorRequest.serialize, + response_deserializer=monitored_resource_pb2.MonitoredResourceDescriptor.FromString, + ) + return self._stubs['get_monitored_resource_descriptor'] + + @property + def list_metric_descriptors(self) -> Callable[ + [metric_service.ListMetricDescriptorsRequest], + metric_service.ListMetricDescriptorsResponse]: + r"""Return a callable for the list metric descriptors method over gRPC. + + Lists metric descriptors that match a filter. This + method does not require a Workspace. + + Returns: + Callable[[~.ListMetricDescriptorsRequest], + ~.ListMetricDescriptorsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_metric_descriptors' not in self._stubs: + self._stubs['list_metric_descriptors'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/ListMetricDescriptors', + request_serializer=metric_service.ListMetricDescriptorsRequest.serialize, + response_deserializer=metric_service.ListMetricDescriptorsResponse.deserialize, + ) + return self._stubs['list_metric_descriptors'] + + @property + def get_metric_descriptor(self) -> Callable[ + [metric_service.GetMetricDescriptorRequest], + metric_pb2.MetricDescriptor]: + r"""Return a callable for the get metric descriptor method over gRPC. + + Gets a single metric descriptor. This method does not + require a Workspace. + + Returns: + Callable[[~.GetMetricDescriptorRequest], + ~.MetricDescriptor]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_metric_descriptor' not in self._stubs: + self._stubs['get_metric_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/GetMetricDescriptor', + request_serializer=metric_service.GetMetricDescriptorRequest.serialize, + response_deserializer=metric_pb2.MetricDescriptor.FromString, + ) + return self._stubs['get_metric_descriptor'] + + @property + def create_metric_descriptor(self) -> Callable[ + [metric_service.CreateMetricDescriptorRequest], + metric_pb2.MetricDescriptor]: + r"""Return a callable for the create metric descriptor method over gRPC. + + Creates a new metric descriptor. User-created metric descriptors + define `custom + metrics `__. + + Returns: + Callable[[~.CreateMetricDescriptorRequest], + ~.MetricDescriptor]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_metric_descriptor' not in self._stubs: + self._stubs['create_metric_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/CreateMetricDescriptor', + request_serializer=metric_service.CreateMetricDescriptorRequest.serialize, + response_deserializer=metric_pb2.MetricDescriptor.FromString, + ) + return self._stubs['create_metric_descriptor'] + + @property + def delete_metric_descriptor(self) -> Callable[ + [metric_service.DeleteMetricDescriptorRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete metric descriptor method over gRPC. + + Deletes a metric descriptor. Only user-created `custom + metrics `__ + can be deleted. + + Returns: + Callable[[~.DeleteMetricDescriptorRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_metric_descriptor' not in self._stubs: + self._stubs['delete_metric_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/DeleteMetricDescriptor', + request_serializer=metric_service.DeleteMetricDescriptorRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_metric_descriptor'] + + @property + def list_time_series(self) -> Callable[ + [metric_service.ListTimeSeriesRequest], + metric_service.ListTimeSeriesResponse]: + r"""Return a callable for the list time series method over gRPC. + + Lists time series that match a filter. This method + does not require a Workspace. + + Returns: + Callable[[~.ListTimeSeriesRequest], + ~.ListTimeSeriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_time_series' not in self._stubs: + self._stubs['list_time_series'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/ListTimeSeries', + request_serializer=metric_service.ListTimeSeriesRequest.serialize, + response_deserializer=metric_service.ListTimeSeriesResponse.deserialize, + ) + return self._stubs['list_time_series'] + + @property + def create_time_series(self) -> Callable[ + [metric_service.CreateTimeSeriesRequest], + empty_pb2.Empty]: + r"""Return a callable for the create time series method over gRPC. + + Creates or adds data to one or more time series. + The response is empty if all time series in the request + were written. If any time series could not be written, a + corresponding failure message is included in the error + response. + + Returns: + Callable[[~.CreateTimeSeriesRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_time_series' not in self._stubs: + self._stubs['create_time_series'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/CreateTimeSeries', + request_serializer=metric_service.CreateTimeSeriesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['create_time_series'] + + +__all__ = ( + 'MetricServiceGrpcTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..8fc56a06 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py @@ -0,0 +1,457 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.types import metric_service +from google.protobuf import empty_pb2 # type: ignore +from .base import MetricServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import MetricServiceGrpcTransport + + +class MetricServiceGrpcAsyncIOTransport(MetricServiceTransport): + """gRPC AsyncIO backend transport for MetricService. + + Manages metric descriptors, monitored resource descriptors, + and time series data. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'monitoring.googleapis.com',
+            credentials: ga_credentials.Credentials = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: aio.Channel = None,
+            api_mtls_endpoint: str = None,
+            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+            ssl_channel_credentials: grpc.ChannelCredentials = None,
+            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+            quota_project_id=None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [metric_service.ListMonitoredResourceDescriptorsRequest], + Awaitable[metric_service.ListMonitoredResourceDescriptorsResponse]]: + r"""Return a callable for the list monitored resource + descriptors method over gRPC. + + Lists monitored resource descriptors that match a + filter. This method does not require a Workspace. + + Returns: + Callable[[~.ListMonitoredResourceDescriptorsRequest], + Awaitable[~.ListMonitoredResourceDescriptorsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
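+        # Usage sketch (placeholder project id; illustration only): on this
+        # AsyncIO transport the returned callable yields an awaitable, e.g.
+        #
+        #   request = metric_service.ListMonitoredResourceDescriptorsRequest(
+        #       name="projects/my-project")
+        #   response = await transport.list_monitored_resource_descriptors(request)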
+ if 'list_monitored_resource_descriptors' not in self._stubs: + self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/ListMonitoredResourceDescriptors', + request_serializer=metric_service.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=metric_service.ListMonitoredResourceDescriptorsResponse.deserialize, + ) + return self._stubs['list_monitored_resource_descriptors'] + + @property + def get_monitored_resource_descriptor(self) -> Callable[ + [metric_service.GetMonitoredResourceDescriptorRequest], + Awaitable[monitored_resource_pb2.MonitoredResourceDescriptor]]: + r"""Return a callable for the get monitored resource + descriptor method over gRPC. + + Gets a single monitored resource descriptor. This + method does not require a Workspace. + + Returns: + Callable[[~.GetMonitoredResourceDescriptorRequest], + Awaitable[~.MonitoredResourceDescriptor]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_monitored_resource_descriptor' not in self._stubs: + self._stubs['get_monitored_resource_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/GetMonitoredResourceDescriptor', + request_serializer=metric_service.GetMonitoredResourceDescriptorRequest.serialize, + response_deserializer=monitored_resource_pb2.MonitoredResourceDescriptor.FromString, + ) + return self._stubs['get_monitored_resource_descriptor'] + + @property + def list_metric_descriptors(self) -> Callable[ + [metric_service.ListMetricDescriptorsRequest], + Awaitable[metric_service.ListMetricDescriptorsResponse]]: + r"""Return a callable for the list metric descriptors method over gRPC. + + Lists metric descriptors that match a filter. This + method does not require a Workspace. + + Returns: + Callable[[~.ListMetricDescriptorsRequest], + Awaitable[~.ListMetricDescriptorsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_metric_descriptors' not in self._stubs: + self._stubs['list_metric_descriptors'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/ListMetricDescriptors', + request_serializer=metric_service.ListMetricDescriptorsRequest.serialize, + response_deserializer=metric_service.ListMetricDescriptorsResponse.deserialize, + ) + return self._stubs['list_metric_descriptors'] + + @property + def get_metric_descriptor(self) -> Callable[ + [metric_service.GetMetricDescriptorRequest], + Awaitable[metric_pb2.MetricDescriptor]]: + r"""Return a callable for the get metric descriptor method over gRPC. + + Gets a single metric descriptor. This method does not + require a Workspace. + + Returns: + Callable[[~.GetMetricDescriptorRequest], + Awaitable[~.MetricDescriptor]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
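+        # Note: the request type is a proto-plus message, so its ``serialize``
+        # helper is passed as the serializer, while ``MetricDescriptor`` is a
+        # vanilla protobuf message from ``google.api`` and is deserialized with
+        # its ``FromString`` classmethod.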
+ if 'get_metric_descriptor' not in self._stubs: + self._stubs['get_metric_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/GetMetricDescriptor', + request_serializer=metric_service.GetMetricDescriptorRequest.serialize, + response_deserializer=metric_pb2.MetricDescriptor.FromString, + ) + return self._stubs['get_metric_descriptor'] + + @property + def create_metric_descriptor(self) -> Callable[ + [metric_service.CreateMetricDescriptorRequest], + Awaitable[metric_pb2.MetricDescriptor]]: + r"""Return a callable for the create metric descriptor method over gRPC. + + Creates a new metric descriptor. User-created metric descriptors + define `custom + metrics `__. + + Returns: + Callable[[~.CreateMetricDescriptorRequest], + Awaitable[~.MetricDescriptor]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_metric_descriptor' not in self._stubs: + self._stubs['create_metric_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/CreateMetricDescriptor', + request_serializer=metric_service.CreateMetricDescriptorRequest.serialize, + response_deserializer=metric_pb2.MetricDescriptor.FromString, + ) + return self._stubs['create_metric_descriptor'] + + @property + def delete_metric_descriptor(self) -> Callable[ + [metric_service.DeleteMetricDescriptorRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete metric descriptor method over gRPC. + + Deletes a metric descriptor. Only user-created `custom + metrics `__ + can be deleted. + + Returns: + Callable[[~.DeleteMetricDescriptorRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_metric_descriptor' not in self._stubs: + self._stubs['delete_metric_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/DeleteMetricDescriptor', + request_serializer=metric_service.DeleteMetricDescriptorRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_metric_descriptor'] + + @property + def list_time_series(self) -> Callable[ + [metric_service.ListTimeSeriesRequest], + Awaitable[metric_service.ListTimeSeriesResponse]]: + r"""Return a callable for the list time series method over gRPC. + + Lists time series that match a filter. This method + does not require a Workspace. + + Returns: + Callable[[~.ListTimeSeriesRequest], + Awaitable[~.ListTimeSeriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_time_series' not in self._stubs: + self._stubs['list_time_series'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/ListTimeSeries', + request_serializer=metric_service.ListTimeSeriesRequest.serialize, + response_deserializer=metric_service.ListTimeSeriesResponse.deserialize, + ) + return self._stubs['list_time_series'] + + @property + def create_time_series(self) -> Callable[ + [metric_service.CreateTimeSeriesRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the create time series method over gRPC. + + Creates or adds data to one or more time series. + The response is empty if all time series in the request + were written. If any time series could not be written, a + corresponding failure message is included in the error + response. + + Returns: + Callable[[~.CreateTimeSeriesRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_time_series' not in self._stubs: + self._stubs['create_time_series'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.MetricService/CreateTimeSeries', + request_serializer=metric_service.CreateTimeSeriesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['create_time_series'] + + +__all__ = ( + 'MetricServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/__init__.py new file mode 100644 index 00000000..fae0f0b3 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import NotificationChannelServiceClient +from .async_client import NotificationChannelServiceAsyncClient + +__all__ = ( + 'NotificationChannelServiceClient', + 'NotificationChannelServiceAsyncClient', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py new file mode 100644 index 00000000..710ca1db --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py @@ -0,0 +1,1143 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.cloud.monitoring_v3.services.notification_channel_service import pagers +from google.cloud.monitoring_v3.types import mutation_record +from google.cloud.monitoring_v3.types import notification +from google.cloud.monitoring_v3.types import notification_service +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from .transports.base import NotificationChannelServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import NotificationChannelServiceGrpcAsyncIOTransport +from .client import NotificationChannelServiceClient + + +class NotificationChannelServiceAsyncClient: + """The Notification Channel API provides access to configuration + that controls how messages related to incidents are sent. 
+ """ + + _client: NotificationChannelServiceClient + + DEFAULT_ENDPOINT = NotificationChannelServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = NotificationChannelServiceClient.DEFAULT_MTLS_ENDPOINT + + notification_channel_path = staticmethod(NotificationChannelServiceClient.notification_channel_path) + parse_notification_channel_path = staticmethod(NotificationChannelServiceClient.parse_notification_channel_path) + notification_channel_descriptor_path = staticmethod(NotificationChannelServiceClient.notification_channel_descriptor_path) + parse_notification_channel_descriptor_path = staticmethod(NotificationChannelServiceClient.parse_notification_channel_descriptor_path) + common_billing_account_path = staticmethod(NotificationChannelServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(NotificationChannelServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(NotificationChannelServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(NotificationChannelServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(NotificationChannelServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(NotificationChannelServiceClient.parse_common_organization_path) + common_project_path = staticmethod(NotificationChannelServiceClient.common_project_path) + parse_common_project_path = staticmethod(NotificationChannelServiceClient.parse_common_project_path) + common_location_path = staticmethod(NotificationChannelServiceClient.common_location_path) + parse_common_location_path = staticmethod(NotificationChannelServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotificationChannelServiceAsyncClient: The constructed client. + """ + return NotificationChannelServiceClient.from_service_account_info.__func__(NotificationChannelServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotificationChannelServiceAsyncClient: The constructed client. + """ + return NotificationChannelServiceClient.from_service_account_file.__func__(NotificationChannelServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NotificationChannelServiceTransport: + """Returns the transport used by the client instance. + + Returns: + NotificationChannelServiceTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial(type(NotificationChannelServiceClient).get_transport_class, type(NotificationChannelServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, NotificationChannelServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the notification channel service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.NotificationChannelServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = NotificationChannelServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_notification_channel_descriptors(self, + request: notification_service.ListNotificationChannelDescriptorsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNotificationChannelDescriptorsAsyncPager: + r"""Lists the descriptors for supported channel types. + The use of descriptors makes it possible for new channel + types to be dynamically added. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest`): + The request object. The + `ListNotificationChannelDescriptors` request. + name (:class:`str`): + Required. The REST resource name of the parent from + which to retrieve the notification channel descriptors. + The expected syntax is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + Note that this + `names `__ + the parent container in which to look for the + descriptors; to retrieve a single descriptor by name, + use the + [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] + operation, instead. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.notification_channel_service.pagers.ListNotificationChannelDescriptorsAsyncPager: + The ListNotificationChannelDescriptors response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.ListNotificationChannelDescriptorsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_notification_channel_descriptors, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListNotificationChannelDescriptorsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_notification_channel_descriptor(self, + request: notification_service.GetNotificationChannelDescriptorRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannelDescriptor: + r"""Gets a single channel descriptor. The descriptor + indicates which fields are expected / permitted for a + notification channel of the given type. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetNotificationChannelDescriptorRequest`): + The request object. The + `GetNotificationChannelDescriptor` response. + name (:class:`str`): + Required. The channel type for which to execute the + request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannelDescriptors/[CHANNEL_TYPE] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.monitoring_v3.types.NotificationChannelDescriptor: + A description of a notification + channel. The descriptor includes the + properties of the channel and the set of + labels or fields that must be specified + to configure channels of a given type. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.GetNotificationChannelDescriptorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_notification_channel_descriptor, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_notification_channels(self, + request: notification_service.ListNotificationChannelsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNotificationChannelsAsyncPager: + r"""Lists the notification channels that have been + created for the project. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListNotificationChannelsRequest`): + The request object. The `ListNotificationChannels` + request. + name (:class:`str`): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This names the container in which to look for the + notification channels; it does not name a specific + channel. To query a specific channel by REST resource + name, use the + [``GetNotificationChannel``][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] + operation. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.notification_channel_service.pagers.ListNotificationChannelsAsyncPager: + The ListNotificationChannels response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
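+        # Calling-convention sketch (``client`` is a
+        # NotificationChannelServiceAsyncClient and the project id is a
+        # placeholder): either the flattened ``name`` argument or a full
+        # request object may be supplied, but not both:
+        #
+        #   await client.list_notification_channels(name="projects/my-project")
+        #   await client.list_notification_channels(
+        #       request=notification_service.ListNotificationChannelsRequest(
+        #           name="projects/my-project"))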
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.ListNotificationChannelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_notification_channels, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListNotificationChannelsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_notification_channel(self, + request: notification_service.GetNotificationChannelRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannel: + r"""Gets a single notification channel. The channel + includes the relevant configuration details with which + the channel was created. However, the response may + truncate or omit passwords, API keys, or other private + key matter and thus the response may not be 100% + identical to the information that was supplied in the + call to the create method. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetNotificationChannelRequest`): + The request object. The `GetNotificationChannel` + request. + name (:class:`str`): + Required. The channel for which to execute the request. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.NotificationChannel: + A NotificationChannel is a medium through which an alert is + delivered when a policy violation is detected. + Examples of channels include email, SMS, and + third-party messaging applications. Fields containing + sensitive information like authentication tokens or + contact info are only partially populated on + retrieval. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.GetNotificationChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_notification_channel, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_notification_channel(self, + request: notification_service.CreateNotificationChannelRequest = None, + *, + name: str = None, + notification_channel: notification.NotificationChannel = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannel: + r"""Creates a new notification channel, representing a + single notification endpoint such as an email address, + SMS number, or PagerDuty service. + + Args: + request (:class:`google.cloud.monitoring_v3.types.CreateNotificationChannelRequest`): + The request object. The `CreateNotificationChannel` + request. + name (:class:`str`): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This names the container into which the channel will be + written, this does not name the newly created channel. + The resulting channel's name will have a normalized + version of this field as a prefix, but will add + ``/notificationChannels/[CHANNEL_ID]`` to identify the + channel. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + notification_channel (:class:`google.cloud.monitoring_v3.types.NotificationChannel`): + Required. The definition of the ``NotificationChannel`` + to create. + + This corresponds to the ``notification_channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.NotificationChannel: + A NotificationChannel is a medium through which an alert is + delivered when a policy violation is detected. + Examples of channels include email, SMS, and + third-party messaging applications. Fields containing + sensitive information like authentication tokens or + contact info are only partially populated on + retrieval. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, notification_channel]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.CreateNotificationChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if notification_channel is not None: + request.notification_channel = notification_channel + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_notification_channel, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_notification_channel(self, + request: notification_service.UpdateNotificationChannelRequest = None, + *, + update_mask: field_mask_pb2.FieldMask = None, + notification_channel: notification.NotificationChannel = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannel: + r"""Updates a notification channel. Fields not specified + in the field mask remain unchanged. + + Args: + request (:class:`google.cloud.monitoring_v3.types.UpdateNotificationChannelRequest`): + The request object. The `UpdateNotificationChannel` + request. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + notification_channel (:class:`google.cloud.monitoring_v3.types.NotificationChannel`): + Required. A description of the changes to be applied to + the specified notification channel. The description must + provide a definition for fields to be updated; the names + of these fields should also be included in the + ``update_mask``. + + This corresponds to the ``notification_channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.NotificationChannel: + A NotificationChannel is a medium through which an alert is + delivered when a policy violation is detected. + Examples of channels include email, SMS, and + third-party messaging applications. Fields containing + sensitive information like authentication tokens or + contact info are only partially populated on + retrieval. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
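+        # Partial-update sketch (``channel`` is an existing NotificationChannel;
+        # the masked field is chosen for illustration):
+        #
+        #   await client.update_notification_channel(
+        #       notification_channel=channel,
+        #       update_mask=field_mask_pb2.FieldMask(paths=["display_name"]))
+        #
+        # Only the paths named in ``update_mask`` are applied; all other fields
+        # of the stored channel remain unchanged.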
+ has_flattened_params = any([update_mask, notification_channel]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.UpdateNotificationChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if update_mask is not None: + request.update_mask = update_mask + if notification_channel is not None: + request.notification_channel = notification_channel + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_notification_channel, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("notification_channel.name", request.notification_channel.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_notification_channel(self, + request: notification_service.DeleteNotificationChannelRequest = None, + *, + name: str = None, + force: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a notification channel. + + Args: + request (:class:`google.cloud.monitoring_v3.types.DeleteNotificationChannelRequest`): + The request object. The `DeleteNotificationChannel` + request. + name (:class:`str`): + Required. The channel for which to execute the request. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (:class:`bool`): + If true, the notification channel + will be deleted regardless of its use in + alert policies (the policies will be + updated to remove the channel). If + false, channels that are still + referenced by an existing alerting + policy will fail to be deleted in a + delete operation. + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.DeleteNotificationChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
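+        # The default policy configured below retries only ``ServiceUnavailable``
+        # errors, backing off exponentially from 0.1s with a 1.3 multiplier
+        # (individual delays capped at 30s) until the overall 30s deadline expires.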
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_notification_channel, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def send_notification_channel_verification_code(self, + request: notification_service.SendNotificationChannelVerificationCodeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Causes a verification code to be delivered to the channel. The + code can then be supplied in ``VerifyNotificationChannel`` to + verify the channel. + + Args: + request (:class:`google.cloud.monitoring_v3.types.SendNotificationChannelVerificationCodeRequest`): + The request object. The + `SendNotificationChannelVerificationCode` request. + name (:class:`str`): + Required. The notification channel to + which to send a verification code. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.SendNotificationChannelVerificationCodeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.send_notification_channel_verification_code, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_notification_channel_verification_code(self, + request: notification_service.GetNotificationChannelVerificationCodeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification_service.GetNotificationChannelVerificationCodeResponse: + r"""Requests a verification code for an already verified + channel that can then be used in a call to + VerifyNotificationChannel() on a different channel with + an equivalent identity in the same or in a different + project. This makes it possible to copy a channel + between projects without requiring manual reverification + of the channel. If the channel is not in the verified + state, this method will fail (in other words, this may + only be used if the + SendNotificationChannelVerificationCode and + VerifyNotificationChannel paths have already been used + to put the given channel into the verified state). + + There is no guarantee that the verification codes + returned by this method will be of a similar structure + or form as the ones that are delivered to the channel + via SendNotificationChannelVerificationCode; while + VerifyNotificationChannel() will recognize both the + codes delivered via + SendNotificationChannelVerificationCode() and returned + from GetNotificationChannelVerificationCode(), it is + typically the case that the verification codes delivered + via + SendNotificationChannelVerificationCode() will be + shorter and also have a shorter expiration (e.g. codes + such as "G-123456") whereas GetVerificationCode() will + typically return a much longer, websafe base 64 encoded + string that has a longer expiration time. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeRequest`): + The request object. The + `GetNotificationChannelVerificationCode` request. + name (:class:`str`): + Required. The notification channel + for which a verification code is to be + generated and retrieved. This must name + a channel that is already verified; if + the specified channel is not verified, + the request will fail. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeResponse: + The GetNotificationChannelVerificationCode request. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.GetNotificationChannelVerificationCodeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_notification_channel_verification_code, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def verify_notification_channel(self, + request: notification_service.VerifyNotificationChannelRequest = None, + *, + name: str = None, + code: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannel: + r"""Verifies a ``NotificationChannel`` by proving receipt of the + code delivered to the channel as a result of calling + ``SendNotificationChannelVerificationCode``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.VerifyNotificationChannelRequest`): + The request object. The `VerifyNotificationChannel` + request. + name (:class:`str`): + Required. The notification channel to + verify. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + code (:class:`str`): + Required. The verification code that was delivered to + the channel as a result of invoking the + ``SendNotificationChannelVerificationCode`` API method + or that was retrieved from a verified channel via + ``GetNotificationChannelVerificationCode``. For example, + one might have "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" + (in general, one is only guaranteed that the code is + valid UTF-8; one should not make any assumptions + regarding the structure or format of the code). + + This corresponds to the ``code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.NotificationChannel: + A NotificationChannel is a medium through which an alert is + delivered when a policy violation is detected. + Examples of channels include email, SMS, and + third-party messaging applications. Fields containing + sensitive information like authentication tokens or + contact info are only partially populated on + retrieval. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, code]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = notification_service.VerifyNotificationChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
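+        # Illustrative call (resource name and verification code are
+        # placeholders, modeled on the formats described above):
+        #
+        #   await client.verify_notification_channel(
+        #       name="projects/my-project/notificationChannels/12345",
+        #       code="G-123456")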
+ if name is not None: + request.name = name + if code is not None: + request.code = code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.verify_notification_channel, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "NotificationChannelServiceAsyncClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/client.py new file mode 100644 index 00000000..5ce3fa51 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/client.py @@ -0,0 +1,1301 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions as core_exceptions         # type: ignore
+from google.api_core import gapic_v1                              # type: ignore
+from google.api_core import retry as retries                      # type: ignore
+from google.auth import credentials as ga_credentials             # type: ignore
+from google.auth.transport import mtls                            # type: ignore
+from google.auth.transport.grpc import SslCredentials             # type: ignore
+from google.auth.exceptions import MutualTLSChannelError          # type: ignore
+from google.oauth2 import service_account                         # type: ignore
+
+from google.api import label_pb2  # type: ignore
+from google.api import launch_stage_pb2  # type: ignore
+from google.cloud.monitoring_v3.services.notification_channel_service import pagers
+from google.cloud.monitoring_v3.types import mutation_record
+from google.cloud.monitoring_v3.types import notification
+from google.cloud.monitoring_v3.types import notification_service
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.protobuf import wrappers_pb2  # type: ignore
+from .transports.base import NotificationChannelServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import NotificationChannelServiceGrpcTransport
+from .transports.grpc_asyncio import NotificationChannelServiceGrpcAsyncIOTransport
+
+
+class NotificationChannelServiceClientMeta(type):
+    """Metaclass for the NotificationChannelService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[NotificationChannelServiceTransport]]
+    _transport_registry["grpc"] = NotificationChannelServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = NotificationChannelServiceGrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: str = None,
+        ) -> Type[NotificationChannelServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class NotificationChannelServiceClient(metaclass=NotificationChannelServiceClientMeta):
+    """The Notification Channel API provides access to configuration
+    that controls how messages related to incidents are sent.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "monitoring.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotificationChannelServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotificationChannelServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NotificationChannelServiceTransport: + """Returns the transport used by the client instance. + + Returns: + NotificationChannelServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def notification_channel_path(project: str,notification_channel: str,) -> str: + """Returns a fully-qualified notification_channel string.""" + return "projects/{project}/notificationChannels/{notification_channel}".format(project=project, notification_channel=notification_channel, ) + + @staticmethod + def parse_notification_channel_path(path: str) -> Dict[str,str]: + """Parses a notification_channel path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/notificationChannels/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def notification_channel_descriptor_path(project: str,channel_descriptor: str,) -> str: + """Returns a fully-qualified notification_channel_descriptor string.""" + return "projects/{project}/notificationChannelDescriptors/{channel_descriptor}".format(project=project, channel_descriptor=channel_descriptor, ) + + @staticmethod + def parse_notification_channel_descriptor_path(path: str) -> Dict[str,str]: + """Parses a notification_channel_descriptor path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/notificationChannelDescriptors/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, NotificationChannelServiceTransport, None] = None, + 
client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the notification channel service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, NotificationChannelServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. 
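+        # The ``api_endpoint`` resolved above is passed to the transport as its
+        # ``host`` below. It can always be overridden explicitly, for example
+        # (illustrative value; any reachable endpoint works):
+        #   client = NotificationChannelServiceClient(
+        #       client_options={"api_endpoint": "monitoring.googleapis.com"},
+        #   )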
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, NotificationChannelServiceTransport): + # transport is a NotificationChannelServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_notification_channel_descriptors(self, + request: notification_service.ListNotificationChannelDescriptorsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNotificationChannelDescriptorsPager: + r"""Lists the descriptors for supported channel types. + The use of descriptors makes it possible for new channel + types to be dynamically added. + + Args: + request (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest): + The request object. The + `ListNotificationChannelDescriptors` request. + name (str): + Required. The REST resource name of the parent from + which to retrieve the notification channel descriptors. + The expected syntax is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + Note that this + `names `__ + the parent container in which to look for the + descriptors; to retrieve a single descriptor by name, + use the + [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] + operation, instead. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.notification_channel_service.pagers.ListNotificationChannelDescriptorsPager: + The ListNotificationChannelDescriptors response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.ListNotificationChannelDescriptorsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
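+        # Callers may therefore pass either a fully-formed request object or
+        # the flattened ``name`` argument, but not both, e.g. (illustrative
+        # project ID):
+        #   client.list_notification_channel_descriptors(
+        #       name="projects/my-project",
+        #   )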
+ if not isinstance(request, notification_service.ListNotificationChannelDescriptorsRequest): + request = notification_service.ListNotificationChannelDescriptorsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_notification_channel_descriptors] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNotificationChannelDescriptorsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_notification_channel_descriptor(self, + request: notification_service.GetNotificationChannelDescriptorRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannelDescriptor: + r"""Gets a single channel descriptor. The descriptor + indicates which fields are expected / permitted for a + notification channel of the given type. + + Args: + request (google.cloud.monitoring_v3.types.GetNotificationChannelDescriptorRequest): + The request object. The + `GetNotificationChannelDescriptor` response. + name (str): + Required. The channel type for which to execute the + request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannelDescriptors/[CHANNEL_TYPE] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.NotificationChannelDescriptor: + A description of a notification + channel. The descriptor includes the + properties of the channel and the set of + labels or fields that must be specified + to configure channels of a given type. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.GetNotificationChannelDescriptorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, notification_service.GetNotificationChannelDescriptorRequest): + request = notification_service.GetNotificationChannelDescriptorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_notification_channel_descriptor] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_notification_channels(self, + request: notification_service.ListNotificationChannelsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNotificationChannelsPager: + r"""Lists the notification channels that have been + created for the project. + + Args: + request (google.cloud.monitoring_v3.types.ListNotificationChannelsRequest): + The request object. The `ListNotificationChannels` + request. + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This names the container in which to look for the + notification channels; it does not name a specific + channel. To query a specific channel by REST resource + name, use the + [``GetNotificationChannel``][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] + operation. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.notification_channel_service.pagers.ListNotificationChannelsPager: + The ListNotificationChannels response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.ListNotificationChannelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, notification_service.ListNotificationChannelsRequest): + request = notification_service.ListNotificationChannelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_notification_channels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
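+        # An explicit ``retry`` or ``timeout`` argument passed by the caller
+        # overrides, for this call only, the defaults configured on the
+        # wrapped method.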
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNotificationChannelsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_notification_channel(self, + request: notification_service.GetNotificationChannelRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannel: + r"""Gets a single notification channel. The channel + includes the relevant configuration details with which + the channel was created. However, the response may + truncate or omit passwords, API keys, or other private + key matter and thus the response may not be 100% + identical to the information that was supplied in the + call to the create method. + + Args: + request (google.cloud.monitoring_v3.types.GetNotificationChannelRequest): + The request object. The `GetNotificationChannel` + request. + name (str): + Required. The channel for which to execute the request. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.NotificationChannel: + A NotificationChannel is a medium through which an alert is + delivered when a policy violation is detected. + Examples of channels include email, SMS, and + third-party messaging applications. Fields containing + sensitive information like authentication tokens or + contact info are only partially populated on + retrieval. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.GetNotificationChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, notification_service.GetNotificationChannelRequest): + request = notification_service.GetNotificationChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_notification_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_notification_channel(self, + request: notification_service.CreateNotificationChannelRequest = None, + *, + name: str = None, + notification_channel: notification.NotificationChannel = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannel: + r"""Creates a new notification channel, representing a + single notification endpoint such as an email address, + SMS number, or PagerDuty service. + + Args: + request (google.cloud.monitoring_v3.types.CreateNotificationChannelRequest): + The request object. The `CreateNotificationChannel` + request. + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This names the container into which the channel will be + written, this does not name the newly created channel. + The resulting channel's name will have a normalized + version of this field as a prefix, but will add + ``/notificationChannels/[CHANNEL_ID]`` to identify the + channel. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + notification_channel (google.cloud.monitoring_v3.types.NotificationChannel): + Required. The definition of the ``NotificationChannel`` + to create. + + This corresponds to the ``notification_channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.NotificationChannel: + A NotificationChannel is a medium through which an alert is + delivered when a policy violation is detected. + Examples of channels include email, SMS, and + third-party messaging applications. Fields containing + sensitive information like authentication tokens or + contact info are only partially populated on + retrieval. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, notification_channel]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.CreateNotificationChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, notification_service.CreateNotificationChannelRequest): + request = notification_service.CreateNotificationChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if notification_channel is not None: + request.notification_channel = notification_channel + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
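+        # For the synchronous client the wrapping was done once when the
+        # transport was constructed (see ``transports/base.py``), so this is
+        # simply a lookup of the pre-configured callable.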
+ rpc = self._transport._wrapped_methods[self._transport.create_notification_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_notification_channel(self, + request: notification_service.UpdateNotificationChannelRequest = None, + *, + update_mask: field_mask_pb2.FieldMask = None, + notification_channel: notification.NotificationChannel = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannel: + r"""Updates a notification channel. Fields not specified + in the field mask remain unchanged. + + Args: + request (google.cloud.monitoring_v3.types.UpdateNotificationChannelRequest): + The request object. The `UpdateNotificationChannel` + request. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + notification_channel (google.cloud.monitoring_v3.types.NotificationChannel): + Required. A description of the changes to be applied to + the specified notification channel. The description must + provide a definition for fields to be updated; the names + of these fields should also be included in the + ``update_mask``. + + This corresponds to the ``notification_channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.NotificationChannel: + A NotificationChannel is a medium through which an alert is + delivered when a policy violation is detected. + Examples of channels include email, SMS, and + third-party messaging applications. Fields containing + sensitive information like authentication tokens or + contact info are only partially populated on + retrieval. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([update_mask, notification_channel]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.UpdateNotificationChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, notification_service.UpdateNotificationChannelRequest): + request = notification_service.UpdateNotificationChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if update_mask is not None: + request.update_mask = update_mask + if notification_channel is not None: + request.notification_channel = notification_channel + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_notification_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("notification_channel.name", request.notification_channel.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_notification_channel(self, + request: notification_service.DeleteNotificationChannelRequest = None, + *, + name: str = None, + force: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a notification channel. + + Args: + request (google.cloud.monitoring_v3.types.DeleteNotificationChannelRequest): + The request object. The `DeleteNotificationChannel` + request. + name (str): + Required. The channel for which to execute the request. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (bool): + If true, the notification channel + will be deleted regardless of its use in + alert policies (the policies will be + updated to remove the channel). If + false, channels that are still + referenced by an existing alerting + policy will fail to be deleted in a + delete operation. + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.DeleteNotificationChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, notification_service.DeleteNotificationChannelRequest): + request = notification_service.DeleteNotificationChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_notification_channel] + + # Certain fields should be provided within the metadata header; + # add these here. 
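+        # ``to_grpc_metadata`` renders this as the ``x-goog-request-params``
+        # header, which the service uses to route the request by resource name.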
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def send_notification_channel_verification_code(self, + request: notification_service.SendNotificationChannelVerificationCodeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Causes a verification code to be delivered to the channel. The + code can then be supplied in ``VerifyNotificationChannel`` to + verify the channel. + + Args: + request (google.cloud.monitoring_v3.types.SendNotificationChannelVerificationCodeRequest): + The request object. The + `SendNotificationChannelVerificationCode` request. + name (str): + Required. The notification channel to + which to send a verification code. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.SendNotificationChannelVerificationCodeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, notification_service.SendNotificationChannelVerificationCodeRequest): + request = notification_service.SendNotificationChannelVerificationCodeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.send_notification_channel_verification_code] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_notification_channel_verification_code(self, + request: notification_service.GetNotificationChannelVerificationCodeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification_service.GetNotificationChannelVerificationCodeResponse: + r"""Requests a verification code for an already verified + channel that can then be used in a call to + VerifyNotificationChannel() on a different channel with + an equivalent identity in the same or in a different + project. This makes it possible to copy a channel + between projects without requiring manual reverification + of the channel. 
If the channel is not in the verified + state, this method will fail (in other words, this may + only be used if the + SendNotificationChannelVerificationCode and + VerifyNotificationChannel paths have already been used + to put the given channel into the verified state). + + There is no guarantee that the verification codes + returned by this method will be of a similar structure + or form as the ones that are delivered to the channel + via SendNotificationChannelVerificationCode; while + VerifyNotificationChannel() will recognize both the + codes delivered via + SendNotificationChannelVerificationCode() and returned + from GetNotificationChannelVerificationCode(), it is + typically the case that the verification codes delivered + via + SendNotificationChannelVerificationCode() will be + shorter and also have a shorter expiration (e.g. codes + such as "G-123456") whereas GetVerificationCode() will + typically return a much longer, websafe base 64 encoded + string that has a longer expiration time. + + Args: + request (google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeRequest): + The request object. The + `GetNotificationChannelVerificationCode` request. + name (str): + Required. The notification channel + for which a verification code is to be + generated and retrieved. This must name + a channel that is already verified; if + the specified channel is not verified, + the request will fail. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeResponse: + The GetNotificationChannelVerificationCode request. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.GetNotificationChannelVerificationCodeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, notification_service.GetNotificationChannelVerificationCodeRequest): + request = notification_service.GetNotificationChannelVerificationCodeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_notification_channel_verification_code] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def verify_notification_channel(self, + request: notification_service.VerifyNotificationChannelRequest = None, + *, + name: str = None, + code: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> notification.NotificationChannel: + r"""Verifies a ``NotificationChannel`` by proving receipt of the + code delivered to the channel as a result of calling + ``SendNotificationChannelVerificationCode``. + + Args: + request (google.cloud.monitoring_v3.types.VerifyNotificationChannelRequest): + The request object. The `VerifyNotificationChannel` + request. + name (str): + Required. The notification channel to + verify. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + code (str): + Required. The verification code that was delivered to + the channel as a result of invoking the + ``SendNotificationChannelVerificationCode`` API method + or that was retrieved from a verified channel via + ``GetNotificationChannelVerificationCode``. For example, + one might have "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" + (in general, one is only guaranteed that the code is + valid UTF-8; one should not make any assumptions + regarding the structure or format of the code). + + This corresponds to the ``code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.NotificationChannel: + A NotificationChannel is a medium through which an alert is + delivered when a policy violation is detected. + Examples of channels include email, SMS, and + third-party messaging applications. Fields containing + sensitive information like authentication tokens or + contact info are only partially populated on + retrieval. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, code]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a notification_service.VerifyNotificationChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, notification_service.VerifyNotificationChannelRequest): + request = notification_service.VerifyNotificationChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if code is not None: + request.code = code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.verify_notification_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "NotificationChannelServiceClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/pagers.py new file mode 100644 index 00000000..82f80120 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/pagers.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.monitoring_v3.types import notification +from google.cloud.monitoring_v3.types import notification_service + + +class ListNotificationChannelDescriptorsPager: + """A pager for iterating through ``list_notification_channel_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``channel_descriptors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNotificationChannelDescriptors`` requests and continue to iterate + through the ``channel_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., notification_service.ListNotificationChannelDescriptorsResponse], + request: notification_service.ListNotificationChannelDescriptorsRequest, + response: notification_service.ListNotificationChannelDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = notification_service.ListNotificationChannelDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[notification_service.ListNotificationChannelDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[notification.NotificationChannelDescriptor]: + for page in self.pages: + yield from page.channel_descriptors + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListNotificationChannelDescriptorsAsyncPager: + """A pager for iterating through ``list_notification_channel_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``channel_descriptors`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListNotificationChannelDescriptors`` requests and continue to iterate + through the ``channel_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[notification_service.ListNotificationChannelDescriptorsResponse]], + request: notification_service.ListNotificationChannelDescriptorsRequest, + response: notification_service.ListNotificationChannelDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = notification_service.ListNotificationChannelDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[notification_service.ListNotificationChannelDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[notification.NotificationChannelDescriptor]: + async def async_generator(): + async for page in self.pages: + for response in page.channel_descriptors: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListNotificationChannelsPager: + """A pager for iterating through ``list_notification_channels`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListNotificationChannelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``notification_channels`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNotificationChannels`` requests and continue to iterate + through the ``notification_channels`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListNotificationChannelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., notification_service.ListNotificationChannelsResponse], + request: notification_service.ListNotificationChannelsRequest, + response: notification_service.ListNotificationChannelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListNotificationChannelsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListNotificationChannelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = notification_service.ListNotificationChannelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[notification_service.ListNotificationChannelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[notification.NotificationChannel]: + for page in self.pages: + yield from page.notification_channels + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListNotificationChannelsAsyncPager: + """A pager for iterating through ``list_notification_channels`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListNotificationChannelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``notification_channels`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListNotificationChannels`` requests and continue to iterate + through the ``notification_channels`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListNotificationChannelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[notification_service.ListNotificationChannelsResponse]], + request: notification_service.ListNotificationChannelsRequest, + response: notification_service.ListNotificationChannelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListNotificationChannelsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListNotificationChannelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = notification_service.ListNotificationChannelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[notification_service.ListNotificationChannelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[notification.NotificationChannel]: + async def async_generator(): + async for page in self.pages: + for response in page.notification_channels: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/__init__.py new file mode 100644 index 00000000..363051c8 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
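For readers skimming this patch, the pagers above are not normally constructed by hand; they are returned by the generated NotificationChannelServiceClient added elsewhere in this change. A minimal, illustrative sketch of how the synchronous pager is consumed, assuming application default credentials and a hypothetical project id, looks like this:

# Illustrative usage sketch; not part of the generated sources in this patch.
# "projects/my-project" is a placeholder and default credentials are assumed.
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()

# list_notification_channel_descriptors() returns the pager defined above;
# iterating it yields NotificationChannelDescriptor messages and fetches
# further pages automatically via next_page_token.
for descriptor in client.list_notification_channel_descriptors(name="projects/my-project"):
    print(descriptor.type)

The per-item iteration simply delegates to the ``pages`` generator shown above, so callers never have to handle page tokens directly.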
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import NotificationChannelServiceTransport +from .grpc import NotificationChannelServiceGrpcTransport +from .grpc_asyncio import NotificationChannelServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[NotificationChannelServiceTransport]] +_transport_registry['grpc'] = NotificationChannelServiceGrpcTransport +_transport_registry['grpc_asyncio'] = NotificationChannelServiceGrpcAsyncIOTransport + +__all__ = ( + 'NotificationChannelServiceTransport', + 'NotificationChannelServiceGrpcTransport', + 'NotificationChannelServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py new file mode 100644 index 00000000..046be367 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py @@ -0,0 +1,340 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.types import notification +from google.cloud.monitoring_v3.types import notification_service +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-monitoring', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class NotificationChannelServiceTransport(abc.ABC): + """Abstract transport class for NotificationChannelService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ) + + DEFAULT_HOST: str = 'monitoring.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + 
scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+        self._wrapped_methods = {
+            self.list_notification_channel_descriptors: gapic_v1.method.wrap_method(
+                self.list_notification_channel_descriptors,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=30.0, multiplier=1.3, deadline=30.0,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                ),
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+            self.get_notification_channel_descriptor: gapic_v1.method.wrap_method(
+                self.get_notification_channel_descriptor,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=30.0, multiplier=1.3, deadline=30.0,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                ),
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+            self.list_notification_channels: gapic_v1.method.wrap_method(
+                self.list_notification_channels,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=30.0, multiplier=1.3, deadline=30.0,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                ),
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+            self.get_notification_channel: gapic_v1.method.wrap_method(
+                self.get_notification_channel,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=30.0, multiplier=1.3, deadline=30.0,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                ),
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+            self.create_notification_channel: gapic_v1.method.wrap_method(
+                self.create_notification_channel,
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+            self.update_notification_channel: gapic_v1.method.wrap_method(
+                self.update_notification_channel,
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+            self.delete_notification_channel: gapic_v1.method.wrap_method(
+                self.delete_notification_channel,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=30.0, multiplier=1.3, deadline=30.0,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                ),
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+            self.send_notification_channel_verification_code: gapic_v1.method.wrap_method(
+                self.send_notification_channel_verification_code,
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+            self.get_notification_channel_verification_code: gapic_v1.method.wrap_method(
+                self.get_notification_channel_verification_code,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=30.0, multiplier=1.3, deadline=30.0,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                ),
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+            self.verify_notification_channel: gapic_v1.method.wrap_method(
+                self.verify_notification_channel,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=30.0, multiplier=1.3, deadline=30.0,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                ),
+                default_timeout=30.0,
+                client_info=client_info,
+            ),
+        }
+
+    @property
+    def list_notification_channel_descriptors(self) -> Callable[
+            [notification_service.ListNotificationChannelDescriptorsRequest],
+            Union[
+                notification_service.ListNotificationChannelDescriptorsResponse,
+                Awaitable[notification_service.ListNotificationChannelDescriptorsResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_notification_channel_descriptor(self) -> Callable[
+            [notification_service.GetNotificationChannelDescriptorRequest],
+            Union[
+                notification.NotificationChannelDescriptor,
+                Awaitable[notification.NotificationChannelDescriptor]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list_notification_channels(self) -> Callable[
+            [notification_service.ListNotificationChannelsRequest],
+            Union[
+                notification_service.ListNotificationChannelsResponse,
+                Awaitable[notification_service.ListNotificationChannelsResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_notification_channel(self) -> Callable[
+            [notification_service.GetNotificationChannelRequest],
+            Union[
+                notification.NotificationChannel,
+                Awaitable[notification.NotificationChannel]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def create_notification_channel(self) -> Callable[
+            [notification_service.CreateNotificationChannelRequest],
+            Union[
+                notification.NotificationChannel,
+                Awaitable[notification.NotificationChannel]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def update_notification_channel(self) -> Callable[
+            [notification_service.UpdateNotificationChannelRequest],
+            Union[
+                notification.NotificationChannel,
+                Awaitable[notification.NotificationChannel]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def delete_notification_channel(self) -> Callable[
+            [notification_service.DeleteNotificationChannelRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def send_notification_channel_verification_code(self) -> Callable[
+            [notification_service.SendNotificationChannelVerificationCodeRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_notification_channel_verification_code(self) -> Callable[
+            [notification_service.GetNotificationChannelVerificationCodeRequest],
+            Union[
+                notification_service.GetNotificationChannelVerificationCodeResponse,
+                Awaitable[notification_service.GetNotificationChannelVerificationCodeResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def verify_notification_channel(self) -> Callable[
+            [notification_service.VerifyNotificationChannelRequest],
+            Union[
+                notification.NotificationChannel,
+                Awaitable[notification.NotificationChannel]
+            ]]:
+        raise NotImplementedError()
+
+
+__all__ = (
+    'NotificationChannelServiceTransport',
+)
diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py
new file mode 100644
index 00000000..b29d26ab
--- /dev/null
+++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py
@@ -0,0 +1,538 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
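The retry defaults wired up in _prep_wrapped_messages above follow the standard google-api-core pattern. As a stand-alone illustration of the same policy (the decorated function below is hypothetical, not part of this patch), the idea can be sketched as:

# Sketch of the retry policy applied to the idempotent RPCs above; the
# decorated function is a stand-in for a real gRPC call.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

# Exponential backoff starting at 0.1s, growing by 1.3x up to 30s between
# attempts, retrying only ServiceUnavailable, and giving up after 30s total.
transient_retry = retries.Retry(
    initial=0.1,
    maximum=30.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    deadline=30.0,
)

@transient_retry
def flaky_rpc():
    # Hypothetical call that may raise ServiceUnavailable transiently;
    # the decorator re-invokes it under the policy above.
    ...

Note that methods such as create_notification_channel and send_notification_channel_verification_code are wrapped with a timeout only, presumably because blindly replaying them is not safe.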
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.monitoring_v3.types import notification +from google.cloud.monitoring_v3.types import notification_service +from google.protobuf import empty_pb2 # type: ignore +from .base import NotificationChannelServiceTransport, DEFAULT_CLIENT_INFO + + +class NotificationChannelServiceGrpcTransport(NotificationChannelServiceTransport): + """gRPC backend transport for NotificationChannelService. + + The Notification Channel API provides access to configuration + that controls how messages related to incidents are sent. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. 
It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_notification_channel_descriptors(self) -> Callable[ + [notification_service.ListNotificationChannelDescriptorsRequest], + notification_service.ListNotificationChannelDescriptorsResponse]: + r"""Return a callable for the list notification channel + descriptors method over gRPC. + + Lists the descriptors for supported channel types. + The use of descriptors makes it possible for new channel + types to be dynamically added. + + Returns: + Callable[[~.ListNotificationChannelDescriptorsRequest], + ~.ListNotificationChannelDescriptorsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_notification_channel_descriptors' not in self._stubs: + self._stubs['list_notification_channel_descriptors'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/ListNotificationChannelDescriptors', + request_serializer=notification_service.ListNotificationChannelDescriptorsRequest.serialize, + response_deserializer=notification_service.ListNotificationChannelDescriptorsResponse.deserialize, + ) + return self._stubs['list_notification_channel_descriptors'] + + @property + def get_notification_channel_descriptor(self) -> Callable[ + [notification_service.GetNotificationChannelDescriptorRequest], + notification.NotificationChannelDescriptor]: + r"""Return a callable for the get notification channel + descriptor method over gRPC. + + Gets a single channel descriptor. The descriptor + indicates which fields are expected / permitted for a + notification channel of the given type. + + Returns: + Callable[[~.GetNotificationChannelDescriptorRequest], + ~.NotificationChannelDescriptor]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_notification_channel_descriptor' not in self._stubs: + self._stubs['get_notification_channel_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/GetNotificationChannelDescriptor', + request_serializer=notification_service.GetNotificationChannelDescriptorRequest.serialize, + response_deserializer=notification.NotificationChannelDescriptor.deserialize, + ) + return self._stubs['get_notification_channel_descriptor'] + + @property + def list_notification_channels(self) -> Callable[ + [notification_service.ListNotificationChannelsRequest], + notification_service.ListNotificationChannelsResponse]: + r"""Return a callable for the list notification channels method over gRPC. + + Lists the notification channels that have been + created for the project. + + Returns: + Callable[[~.ListNotificationChannelsRequest], + ~.ListNotificationChannelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_notification_channels' not in self._stubs: + self._stubs['list_notification_channels'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/ListNotificationChannels', + request_serializer=notification_service.ListNotificationChannelsRequest.serialize, + response_deserializer=notification_service.ListNotificationChannelsResponse.deserialize, + ) + return self._stubs['list_notification_channels'] + + @property + def get_notification_channel(self) -> Callable[ + [notification_service.GetNotificationChannelRequest], + notification.NotificationChannel]: + r"""Return a callable for the get notification channel method over gRPC. + + Gets a single notification channel. The channel + includes the relevant configuration details with which + the channel was created. However, the response may + truncate or omit passwords, API keys, or other private + key matter and thus the response may not be 100% + identical to the information that was supplied in the + call to the create method. + + Returns: + Callable[[~.GetNotificationChannelRequest], + ~.NotificationChannel]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_notification_channel' not in self._stubs: + self._stubs['get_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/GetNotificationChannel', + request_serializer=notification_service.GetNotificationChannelRequest.serialize, + response_deserializer=notification.NotificationChannel.deserialize, + ) + return self._stubs['get_notification_channel'] + + @property + def create_notification_channel(self) -> Callable[ + [notification_service.CreateNotificationChannelRequest], + notification.NotificationChannel]: + r"""Return a callable for the create notification channel method over gRPC. + + Creates a new notification channel, representing a + single notification endpoint such as an email address, + SMS number, or PagerDuty service. + + Returns: + Callable[[~.CreateNotificationChannelRequest], + ~.NotificationChannel]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_notification_channel' not in self._stubs: + self._stubs['create_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/CreateNotificationChannel', + request_serializer=notification_service.CreateNotificationChannelRequest.serialize, + response_deserializer=notification.NotificationChannel.deserialize, + ) + return self._stubs['create_notification_channel'] + + @property + def update_notification_channel(self) -> Callable[ + [notification_service.UpdateNotificationChannelRequest], + notification.NotificationChannel]: + r"""Return a callable for the update notification channel method over gRPC. + + Updates a notification channel. Fields not specified + in the field mask remain unchanged. + + Returns: + Callable[[~.UpdateNotificationChannelRequest], + ~.NotificationChannel]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_notification_channel' not in self._stubs: + self._stubs['update_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/UpdateNotificationChannel', + request_serializer=notification_service.UpdateNotificationChannelRequest.serialize, + response_deserializer=notification.NotificationChannel.deserialize, + ) + return self._stubs['update_notification_channel'] + + @property + def delete_notification_channel(self) -> Callable[ + [notification_service.DeleteNotificationChannelRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete notification channel method over gRPC. + + Deletes a notification channel. + + Returns: + Callable[[~.DeleteNotificationChannelRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_notification_channel' not in self._stubs: + self._stubs['delete_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/DeleteNotificationChannel', + request_serializer=notification_service.DeleteNotificationChannelRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_notification_channel'] + + @property + def send_notification_channel_verification_code(self) -> Callable[ + [notification_service.SendNotificationChannelVerificationCodeRequest], + empty_pb2.Empty]: + r"""Return a callable for the send notification channel + verification code method over gRPC. + + Causes a verification code to be delivered to the channel. The + code can then be supplied in ``VerifyNotificationChannel`` to + verify the channel. + + Returns: + Callable[[~.SendNotificationChannelVerificationCodeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'send_notification_channel_verification_code' not in self._stubs: + self._stubs['send_notification_channel_verification_code'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/SendNotificationChannelVerificationCode', + request_serializer=notification_service.SendNotificationChannelVerificationCodeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['send_notification_channel_verification_code'] + + @property + def get_notification_channel_verification_code(self) -> Callable[ + [notification_service.GetNotificationChannelVerificationCodeRequest], + notification_service.GetNotificationChannelVerificationCodeResponse]: + r"""Return a callable for the get notification channel + verification code method over gRPC. + + Requests a verification code for an already verified + channel that can then be used in a call to + VerifyNotificationChannel() on a different channel with + an equivalent identity in the same or in a different + project. This makes it possible to copy a channel + between projects without requiring manual reverification + of the channel. If the channel is not in the verified + state, this method will fail (in other words, this may + only be used if the + SendNotificationChannelVerificationCode and + VerifyNotificationChannel paths have already been used + to put the given channel into the verified state). + + There is no guarantee that the verification codes + returned by this method will be of a similar structure + or form as the ones that are delivered to the channel + via SendNotificationChannelVerificationCode; while + VerifyNotificationChannel() will recognize both the + codes delivered via + SendNotificationChannelVerificationCode() and returned + from GetNotificationChannelVerificationCode(), it is + typically the case that the verification codes delivered + via + SendNotificationChannelVerificationCode() will be + shorter and also have a shorter expiration (e.g. codes + such as "G-123456") whereas GetVerificationCode() will + typically return a much longer, websafe base 64 encoded + string that has a longer expiration time. + + Returns: + Callable[[~.GetNotificationChannelVerificationCodeRequest], + ~.GetNotificationChannelVerificationCodeResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_notification_channel_verification_code' not in self._stubs: + self._stubs['get_notification_channel_verification_code'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/GetNotificationChannelVerificationCode', + request_serializer=notification_service.GetNotificationChannelVerificationCodeRequest.serialize, + response_deserializer=notification_service.GetNotificationChannelVerificationCodeResponse.deserialize, + ) + return self._stubs['get_notification_channel_verification_code'] + + @property + def verify_notification_channel(self) -> Callable[ + [notification_service.VerifyNotificationChannelRequest], + notification.NotificationChannel]: + r"""Return a callable for the verify notification channel method over gRPC. + + Verifies a ``NotificationChannel`` by proving receipt of the + code delivered to the channel as a result of calling + ``SendNotificationChannelVerificationCode``. 
+ + Returns: + Callable[[~.VerifyNotificationChannelRequest], + ~.NotificationChannel]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'verify_notification_channel' not in self._stubs: + self._stubs['verify_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/VerifyNotificationChannel', + request_serializer=notification_service.VerifyNotificationChannelRequest.serialize, + response_deserializer=notification.NotificationChannel.deserialize, + ) + return self._stubs['verify_notification_channel'] + + +__all__ = ( + 'NotificationChannelServiceGrpcTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..f17cc7c2 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py @@ -0,0 +1,542 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.monitoring_v3.types import notification +from google.cloud.monitoring_v3.types import notification_service +from google.protobuf import empty_pb2 # type: ignore +from .base import NotificationChannelServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import NotificationChannelServiceGrpcTransport + + +class NotificationChannelServiceGrpcAsyncIOTransport(NotificationChannelServiceTransport): + """gRPC AsyncIO backend transport for NotificationChannelService. + + The Notification Channel API provides access to configuration + that controls how messages related to incidents are sent. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def list_notification_channel_descriptors(self) -> Callable[ + [notification_service.ListNotificationChannelDescriptorsRequest], + Awaitable[notification_service.ListNotificationChannelDescriptorsResponse]]: + r"""Return a callable for the list notification channel + descriptors method over gRPC. + + Lists the descriptors for supported channel types. + The use of descriptors makes it possible for new channel + types to be dynamically added. + + Returns: + Callable[[~.ListNotificationChannelDescriptorsRequest], + Awaitable[~.ListNotificationChannelDescriptorsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_notification_channel_descriptors' not in self._stubs: + self._stubs['list_notification_channel_descriptors'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/ListNotificationChannelDescriptors', + request_serializer=notification_service.ListNotificationChannelDescriptorsRequest.serialize, + response_deserializer=notification_service.ListNotificationChannelDescriptorsResponse.deserialize, + ) + return self._stubs['list_notification_channel_descriptors'] + + @property + def get_notification_channel_descriptor(self) -> Callable[ + [notification_service.GetNotificationChannelDescriptorRequest], + Awaitable[notification.NotificationChannelDescriptor]]: + r"""Return a callable for the get notification channel + descriptor method over gRPC. + + Gets a single channel descriptor. The descriptor + indicates which fields are expected / permitted for a + notification channel of the given type. + + Returns: + Callable[[~.GetNotificationChannelDescriptorRequest], + Awaitable[~.NotificationChannelDescriptor]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_notification_channel_descriptor' not in self._stubs: + self._stubs['get_notification_channel_descriptor'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/GetNotificationChannelDescriptor', + request_serializer=notification_service.GetNotificationChannelDescriptorRequest.serialize, + response_deserializer=notification.NotificationChannelDescriptor.deserialize, + ) + return self._stubs['get_notification_channel_descriptor'] + + @property + def list_notification_channels(self) -> Callable[ + [notification_service.ListNotificationChannelsRequest], + Awaitable[notification_service.ListNotificationChannelsResponse]]: + r"""Return a callable for the list notification channels method over gRPC. + + Lists the notification channels that have been + created for the project. + + Returns: + Callable[[~.ListNotificationChannelsRequest], + Awaitable[~.ListNotificationChannelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_notification_channels' not in self._stubs: + self._stubs['list_notification_channels'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/ListNotificationChannels', + request_serializer=notification_service.ListNotificationChannelsRequest.serialize, + response_deserializer=notification_service.ListNotificationChannelsResponse.deserialize, + ) + return self._stubs['list_notification_channels'] + + @property + def get_notification_channel(self) -> Callable[ + [notification_service.GetNotificationChannelRequest], + Awaitable[notification.NotificationChannel]]: + r"""Return a callable for the get notification channel method over gRPC. + + Gets a single notification channel. The channel + includes the relevant configuration details with which + the channel was created. However, the response may + truncate or omit passwords, API keys, or other private + key matter and thus the response may not be 100% + identical to the information that was supplied in the + call to the create method. + + Returns: + Callable[[~.GetNotificationChannelRequest], + Awaitable[~.NotificationChannel]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_notification_channel' not in self._stubs: + self._stubs['get_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/GetNotificationChannel', + request_serializer=notification_service.GetNotificationChannelRequest.serialize, + response_deserializer=notification.NotificationChannel.deserialize, + ) + return self._stubs['get_notification_channel'] + + @property + def create_notification_channel(self) -> Callable[ + [notification_service.CreateNotificationChannelRequest], + Awaitable[notification.NotificationChannel]]: + r"""Return a callable for the create notification channel method over gRPC. + + Creates a new notification channel, representing a + single notification endpoint such as an email address, + SMS number, or PagerDuty service. + + Returns: + Callable[[~.CreateNotificationChannelRequest], + Awaitable[~.NotificationChannel]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_notification_channel' not in self._stubs: + self._stubs['create_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/CreateNotificationChannel', + request_serializer=notification_service.CreateNotificationChannelRequest.serialize, + response_deserializer=notification.NotificationChannel.deserialize, + ) + return self._stubs['create_notification_channel'] + + @property + def update_notification_channel(self) -> Callable[ + [notification_service.UpdateNotificationChannelRequest], + Awaitable[notification.NotificationChannel]]: + r"""Return a callable for the update notification channel method over gRPC. + + Updates a notification channel. Fields not specified + in the field mask remain unchanged. + + Returns: + Callable[[~.UpdateNotificationChannelRequest], + Awaitable[~.NotificationChannel]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_notification_channel' not in self._stubs: + self._stubs['update_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/UpdateNotificationChannel', + request_serializer=notification_service.UpdateNotificationChannelRequest.serialize, + response_deserializer=notification.NotificationChannel.deserialize, + ) + return self._stubs['update_notification_channel'] + + @property + def delete_notification_channel(self) -> Callable[ + [notification_service.DeleteNotificationChannelRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete notification channel method over gRPC. + + Deletes a notification channel. + + Returns: + Callable[[~.DeleteNotificationChannelRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_notification_channel' not in self._stubs: + self._stubs['delete_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/DeleteNotificationChannel', + request_serializer=notification_service.DeleteNotificationChannelRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_notification_channel'] + + @property + def send_notification_channel_verification_code(self) -> Callable[ + [notification_service.SendNotificationChannelVerificationCodeRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the send notification channel + verification code method over gRPC. + + Causes a verification code to be delivered to the channel. The + code can then be supplied in ``VerifyNotificationChannel`` to + verify the channel. + + Returns: + Callable[[~.SendNotificationChannelVerificationCodeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'send_notification_channel_verification_code' not in self._stubs: + self._stubs['send_notification_channel_verification_code'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/SendNotificationChannelVerificationCode', + request_serializer=notification_service.SendNotificationChannelVerificationCodeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['send_notification_channel_verification_code'] + + @property + def get_notification_channel_verification_code(self) -> Callable[ + [notification_service.GetNotificationChannelVerificationCodeRequest], + Awaitable[notification_service.GetNotificationChannelVerificationCodeResponse]]: + r"""Return a callable for the get notification channel + verification code method over gRPC. + + Requests a verification code for an already verified + channel that can then be used in a call to + VerifyNotificationChannel() on a different channel with + an equivalent identity in the same or in a different + project. 
This makes it possible to copy a channel + between projects without requiring manual reverification + of the channel. If the channel is not in the verified + state, this method will fail (in other words, this may + only be used if the + SendNotificationChannelVerificationCode and + VerifyNotificationChannel paths have already been used + to put the given channel into the verified state). + + There is no guarantee that the verification codes + returned by this method will be of a similar structure + or form as the ones that are delivered to the channel + via SendNotificationChannelVerificationCode; while + VerifyNotificationChannel() will recognize both the + codes delivered via + SendNotificationChannelVerificationCode() and returned + from GetNotificationChannelVerificationCode(), it is + typically the case that the verification codes delivered + via + SendNotificationChannelVerificationCode() will be + shorter and also have a shorter expiration (e.g. codes + such as "G-123456") whereas GetVerificationCode() will + typically return a much longer, websafe base 64 encoded + string that has a longer expiration time. + + Returns: + Callable[[~.GetNotificationChannelVerificationCodeRequest], + Awaitable[~.GetNotificationChannelVerificationCodeResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_notification_channel_verification_code' not in self._stubs: + self._stubs['get_notification_channel_verification_code'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/GetNotificationChannelVerificationCode', + request_serializer=notification_service.GetNotificationChannelVerificationCodeRequest.serialize, + response_deserializer=notification_service.GetNotificationChannelVerificationCodeResponse.deserialize, + ) + return self._stubs['get_notification_channel_verification_code'] + + @property + def verify_notification_channel(self) -> Callable[ + [notification_service.VerifyNotificationChannelRequest], + Awaitable[notification.NotificationChannel]]: + r"""Return a callable for the verify notification channel method over gRPC. + + Verifies a ``NotificationChannel`` by proving receipt of the + code delivered to the channel as a result of calling + ``SendNotificationChannelVerificationCode``. + + Returns: + Callable[[~.VerifyNotificationChannelRequest], + Awaitable[~.NotificationChannel]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
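A hedged sketch of the verification flow these docstrings describe: first ask the service to deliver a short-lived code to the channel, then prove receipt of it. The project ID and channel ID are placeholders, and the code value is only illustrative (the docstring above cites "G-123456" as the typical delivered format).

from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
channel_name = "projects/my-project/notificationChannels/1234567890"

# Ask the service to deliver a verification code to the channel itself.
client.send_notification_channel_verification_code(name=channel_name)

# Supply the code the user received to move the channel into the verified state.
client.verify_notification_channel(name=channel_name, code="G-123456")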
+ if 'verify_notification_channel' not in self._stubs: + self._stubs['verify_notification_channel'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.NotificationChannelService/VerifyNotificationChannel', + request_serializer=notification_service.VerifyNotificationChannelRequest.serialize, + response_deserializer=notification.NotificationChannel.deserialize, + ) + return self._stubs['verify_notification_channel'] + + +__all__ = ( + 'NotificationChannelServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/__init__.py new file mode 100644 index 00000000..f4b8a9c1 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import QueryServiceClient +from .async_client import QueryServiceAsyncClient + +__all__ = ( + 'QueryServiceClient', + 'QueryServiceAsyncClient', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/async_client.py new file mode 100644 index 00000000..d4f0d382 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/async_client.py @@ -0,0 +1,231 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.services.query_service import pagers +from google.cloud.monitoring_v3.types import metric +from google.cloud.monitoring_v3.types import metric_service +from .transports.base import QueryServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import QueryServiceGrpcAsyncIOTransport +from .client import QueryServiceClient + + +class QueryServiceAsyncClient: + """The QueryService API is used to manage time series data in + Stackdriver Monitoring. Time series data is a collection of data + points that describes the time-varying values of a metric. + """ + + _client: QueryServiceClient + + DEFAULT_ENDPOINT = QueryServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = QueryServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(QueryServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(QueryServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(QueryServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(QueryServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(QueryServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(QueryServiceClient.parse_common_organization_path) + common_project_path = staticmethod(QueryServiceClient.common_project_path) + parse_common_project_path = staticmethod(QueryServiceClient.parse_common_project_path) + common_location_path = staticmethod(QueryServiceClient.common_location_path) + parse_common_location_path = staticmethod(QueryServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QueryServiceAsyncClient: The constructed client. + """ + return QueryServiceClient.from_service_account_info.__func__(QueryServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QueryServiceAsyncClient: The constructed client. + """ + return QueryServiceClient.from_service_account_file.__func__(QueryServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> QueryServiceTransport: + """Returns the transport used by the client instance. + + Returns: + QueryServiceTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial(type(QueryServiceClient).get_transport_class, type(QueryServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, QueryServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the query service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.QueryServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = QueryServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def query_time_series(self, + request: metric_service.QueryTimeSeriesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.QueryTimeSeriesAsyncPager: + r"""Queries time series using Monitoring Query Language. + This method does not require a Workspace. + + Args: + request (:class:`google.cloud.monitoring_v3.types.QueryTimeSeriesRequest`): + The request object. The `QueryTimeSeries` request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.query_service.pagers.QueryTimeSeriesAsyncPager: + The QueryTimeSeries response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + request = metric_service.QueryTimeSeriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.query_time_series, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.QueryTimeSeriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "QueryServiceAsyncClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/client.py new file mode 100644 index 00000000..e36c6441 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/client.py @@ -0,0 +1,414 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.services.query_service import pagers +from google.cloud.monitoring_v3.types import metric +from google.cloud.monitoring_v3.types import metric_service +from .transports.base import QueryServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import QueryServiceGrpcTransport +from .transports.grpc_asyncio import QueryServiceGrpcAsyncIOTransport + + +class QueryServiceClientMeta(type): + """Metaclass for the QueryService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[QueryServiceTransport]] + _transport_registry["grpc"] = QueryServiceGrpcTransport + _transport_registry["grpc_asyncio"] = QueryServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[QueryServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class QueryServiceClient(metaclass=QueryServiceClientMeta): + """The QueryService API is used to manage time series data in + Stackdriver Monitoring. Time series data is a collection of data + points that describes the time-varying values of a metric. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "monitoring.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QueryServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QueryServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> QueryServiceTransport: + """Returns the transport used by the client instance. + + Returns: + QueryServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, QueryServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the query service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, QueryServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, QueryServiceTransport): + # transport is a QueryServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def query_time_series(self, + request: metric_service.QueryTimeSeriesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.QueryTimeSeriesPager: + r"""Queries time series using Monitoring Query Language. + This method does not require a Workspace. + + Args: + request (google.cloud.monitoring_v3.types.QueryTimeSeriesRequest): + The request object. The `QueryTimeSeries` request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.query_service.pagers.QueryTimeSeriesPager: + The QueryTimeSeries response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a metric_service.QueryTimeSeriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metric_service.QueryTimeSeriesRequest): + request = metric_service.QueryTimeSeriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_time_series] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.QueryTimeSeriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "QueryServiceClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/pagers.py new file mode 100644 index 00000000..f83cde37 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/pagers.py @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.monitoring_v3.types import metric +from google.cloud.monitoring_v3.types import metric_service + + +class QueryTimeSeriesPager: + """A pager for iterating through ``query_time_series`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.QueryTimeSeriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``time_series_data`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``QueryTimeSeries`` requests and continue to iterate + through the ``time_series_data`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.QueryTimeSeriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., metric_service.QueryTimeSeriesResponse], + request: metric_service.QueryTimeSeriesRequest, + response: metric_service.QueryTimeSeriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.QueryTimeSeriesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.QueryTimeSeriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metric_service.QueryTimeSeriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[metric_service.QueryTimeSeriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[metric.TimeSeriesData]: + for page in self.pages: + yield from page.time_series_data + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class QueryTimeSeriesAsyncPager: + """A pager for iterating through ``query_time_series`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.QueryTimeSeriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``time_series_data`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``QueryTimeSeries`` requests and continue to iterate + through the ``time_series_data`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.QueryTimeSeriesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[metric_service.QueryTimeSeriesResponse]], + request: metric_service.QueryTimeSeriesRequest, + response: metric_service.QueryTimeSeriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.QueryTimeSeriesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.QueryTimeSeriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metric_service.QueryTimeSeriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[metric_service.QueryTimeSeriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[metric.TimeSeriesData]: + async def async_generator(): + async for page in self.pages: + for response in page.time_series_data: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/__init__.py new file mode 100644 index 00000000..7b5d1cad --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import QueryServiceTransport +from .grpc import QueryServiceGrpcTransport +from .grpc_asyncio import QueryServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
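A hedged usage sketch for the pagers defined above, assuming a hypothetical project ID and an illustrative MQL query string; element-wise iteration fetches further pages transparently, while the pages property exposes whole responses.

from google.cloud import monitoring_v3

client = monitoring_v3.QueryServiceClient()
pager = client.query_time_series(
    request={
        "name": "projects/my-project",
        "query": "fetch gce_instance | metric 'compute.googleapis.com/instance/cpu/utilization' | within 5m",
    }
)

# Element-wise iteration; additional pages are requested transparently.
for time_series_data in pager:
    print(time_series_data)

# Alternatively, iterate whole responses page by page:
# for page in client.query_time_series(request=...).pages: ...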
+_transport_registry = OrderedDict() # type: Dict[str, Type[QueryServiceTransport]] +_transport_registry['grpc'] = QueryServiceGrpcTransport +_transport_registry['grpc_asyncio'] = QueryServiceGrpcAsyncIOTransport + +__all__ = ( + 'QueryServiceTransport', + 'QueryServiceGrpcTransport', + 'QueryServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/base.py new file mode 100644 index 00000000..9ddc63af --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/base.py @@ -0,0 +1,170 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.types import metric_service + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-monitoring', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class QueryServiceTransport(abc.ABC): + """Abstract transport class for QueryService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ) + + DEFAULT_HOST: str = 'monitoring.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
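A hedged sketch of the two credential paths the constructor above resolves: explicit service-account credentials, here via the generated from_service_account_file helper with a placeholder key path, or Application Default Credentials discovered through google.auth.default().

from google.cloud import monitoring_v3

# Explicit service-account key (path is a placeholder).
client = monitoring_v3.QueryServiceClient.from_service_account_file("/path/to/key.json")

# Or rely on Application Default Credentials resolved from the environment.
adc_client = monitoring_v3.QueryServiceClient()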
+ self._wrapped_methods = { + self.query_time_series: gapic_v1.method.wrap_method( + self.query_time_series, + default_timeout=None, + client_info=client_info, + ), + } + + @property + def query_time_series(self) -> Callable[ + [metric_service.QueryTimeSeriesRequest], + Union[ + metric_service.QueryTimeSeriesResponse, + Awaitable[metric_service.QueryTimeSeriesResponse] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'QueryServiceTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc.py new file mode 100644 index 00000000..28ea91c0 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc.py @@ -0,0 +1,255 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.monitoring_v3.types import metric_service +from .base import QueryServiceTransport, DEFAULT_CLIENT_INFO + + +class QueryServiceGrpcTransport(QueryServiceTransport): + """gRPC backend transport for QueryService. + + The QueryService API is used to manage time series data in + Stackdriver Monitoring. Time series data is a collection of data + points that describes the time-varying values of a metric. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def query_time_series(self) -> Callable[ + [metric_service.QueryTimeSeriesRequest], + metric_service.QueryTimeSeriesResponse]: + r"""Return a callable for the query time series method over gRPC. + + Queries time series using Monitoring Query Language. + This method does not require a Workspace. + + Returns: + Callable[[~.QueryTimeSeriesRequest], + ~.QueryTimeSeriesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
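A hedged sketch of supplying a pre-built channel, as the constructor above allows; credentials obtained from google.auth.default() are attached when the channel is created, and the client is then handed the ready-made transport. When a channel is supplied, the transport ignores any credentials passed alongside it.

import google.auth
from google.cloud import monitoring_v3
from google.cloud.monitoring_v3.services.query_service.transports import (
    QueryServiceGrpcTransport,
)

# Build a channel with explicitly resolved credentials, wrap it in the gRPC
# transport, and pass that transport to the client.
credentials, _ = google.auth.default()
channel = QueryServiceGrpcTransport.create_channel(credentials=credentials)
transport = QueryServiceGrpcTransport(channel=channel)
client = monitoring_v3.QueryServiceClient(transport=transport)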
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'query_time_series' not in self._stubs: + self._stubs['query_time_series'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.QueryService/QueryTimeSeries', + request_serializer=metric_service.QueryTimeSeriesRequest.serialize, + response_deserializer=metric_service.QueryTimeSeriesResponse.deserialize, + ) + return self._stubs['query_time_series'] + + +__all__ = ( + 'QueryServiceGrpcTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..4d1c336e --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py @@ -0,0 +1,259 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.monitoring_v3.types import metric_service +from .base import QueryServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import QueryServiceGrpcTransport + + +class QueryServiceGrpcAsyncIOTransport(QueryServiceTransport): + """gRPC AsyncIO backend transport for QueryService. + + The QueryService API is used to manage time series data in + Stackdriver Monitoring. Time series data is a collection of data + points that describes the time-varying values of a metric. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def query_time_series(self) -> Callable[ + [metric_service.QueryTimeSeriesRequest], + Awaitable[metric_service.QueryTimeSeriesResponse]]: + r"""Return a callable for the query time series method over gRPC. + + Queries time series using Monitoring Query Language. + This method does not require a Workspace. + + Returns: + Callable[[~.QueryTimeSeriesRequest], + Awaitable[~.QueryTimeSeriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
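A corresponding asyncio-side sketch of driving the same RPC through the async client, which wires in this transport by default. The project ID and query are placeholders, and the ``async for`` iteration assumes the async pager behaviour described in the generated ``pagers`` module.

    import asyncio
    from google.cloud import monitoring_v3

    async def main():
        client = monitoring_v3.QueryServiceAsyncClient()
        request = monitoring_v3.QueryTimeSeriesRequest(
            name="projects/my-project",
            query="fetch gce_instance | metric 'compute.googleapis.com/instance/cpu/utilization' | within 5m",
        )
        # Awaiting the call returns an async pager; iterate with `async for`.
        pager = await client.query_time_series(request=request)
        async for time_series_data in pager:
            print(time_series_data)

    asyncio.run(main())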
+ if 'query_time_series' not in self._stubs: + self._stubs['query_time_series'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.QueryService/QueryTimeSeries', + request_serializer=metric_service.QueryTimeSeriesRequest.serialize, + response_deserializer=metric_service.QueryTimeSeriesResponse.deserialize, + ) + return self._stubs['query_time_series'] + + +__all__ = ( + 'QueryServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/__init__.py new file mode 100644 index 00000000..68f0d87a --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ServiceMonitoringServiceClient +from .async_client import ServiceMonitoringServiceAsyncClient + +__all__ = ( + 'ServiceMonitoringServiceClient', + 'ServiceMonitoringServiceAsyncClient', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py new file mode 100644 index 00000000..f255e172 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py @@ -0,0 +1,1061 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.services.service_monitoring_service import pagers +from google.cloud.monitoring_v3.types import service +from google.cloud.monitoring_v3.types import service as gm_service +from google.cloud.monitoring_v3.types import service_service +from google.protobuf import duration_pb2 # type: ignore +from google.type import calendar_period_pb2 # type: ignore +from .transports.base import ServiceMonitoringServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ServiceMonitoringServiceGrpcAsyncIOTransport +from .client import ServiceMonitoringServiceClient + + +class ServiceMonitoringServiceAsyncClient: + """The Cloud Monitoring Service-Oriented Monitoring API has endpoints + for managing and querying aspects of a workspace's services. These + include the ``Service``'s monitored resources, its Service-Level + Objectives, and a taxonomy of categorized Health Metrics. + """ + + _client: ServiceMonitoringServiceClient + + DEFAULT_ENDPOINT = ServiceMonitoringServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ServiceMonitoringServiceClient.DEFAULT_MTLS_ENDPOINT + + service_path = staticmethod(ServiceMonitoringServiceClient.service_path) + parse_service_path = staticmethod(ServiceMonitoringServiceClient.parse_service_path) + service_level_objective_path = staticmethod(ServiceMonitoringServiceClient.service_level_objective_path) + parse_service_level_objective_path = staticmethod(ServiceMonitoringServiceClient.parse_service_level_objective_path) + common_billing_account_path = staticmethod(ServiceMonitoringServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(ServiceMonitoringServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(ServiceMonitoringServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(ServiceMonitoringServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(ServiceMonitoringServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(ServiceMonitoringServiceClient.parse_common_organization_path) + common_project_path = staticmethod(ServiceMonitoringServiceClient.common_project_path) + parse_common_project_path = staticmethod(ServiceMonitoringServiceClient.parse_common_project_path) + common_location_path = staticmethod(ServiceMonitoringServiceClient.common_location_path) + parse_common_location_path = staticmethod(ServiceMonitoringServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceMonitoringServiceAsyncClient: The constructed client. 
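The path helpers exposed above are plain string formatters and parsers; a small sketch (the resource IDs are made up) of how they round-trip a fully-qualified resource name:

    from google.cloud import monitoring_v3

    client_cls = monitoring_v3.ServiceMonitoringServiceAsyncClient

    name = client_cls.service_level_objective_path(
        "my-project", "my-service", "my-slo"
    )
    # -> "projects/my-project/services/my-service/serviceLevelObjectives/my-slo"

    # Parsing is the inverse and returns the captured segments, or {} if the
    # string does not match the expected pattern.
    segments = client_cls.parse_service_level_objective_path(name)
    # -> {"project": "my-project", "service": "my-service",
    #     "service_level_objective": "my-slo"}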
+ """ + return ServiceMonitoringServiceClient.from_service_account_info.__func__(ServiceMonitoringServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceMonitoringServiceAsyncClient: The constructed client. + """ + return ServiceMonitoringServiceClient.from_service_account_file.__func__(ServiceMonitoringServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ServiceMonitoringServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceMonitoringServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(ServiceMonitoringServiceClient).get_transport_class, type(ServiceMonitoringServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, ServiceMonitoringServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service monitoring service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ServiceMonitoringServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ServiceMonitoringServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_service(self, + request: service_service.CreateServiceRequest = None, + *, + parent: str = None, + service: gm_service.Service = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gm_service.Service: + r"""Create a ``Service``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.CreateServiceRequest`): + The request object. The `CreateService` request. + parent (:class:`str`): + Required. Resource + `name `__ + of the parent workspace. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service (:class:`google.cloud.monitoring_v3.types.Service`): + Required. The ``Service`` to create. + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Service: + A Service is a discrete, autonomous, and network-accessible unit, designed + to solve an individual concern + ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). + In Cloud Monitoring, a Service acts as the root + resource under which operational aspects of the + service are accessible. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, service]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.CreateServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if service is not None: + request.service = service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_service, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_service(self, + request: service_service.GetServiceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Service: + r"""Get the named ``Service``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetServiceRequest`): + The request object. The `GetService` request. + name (:class:`str`): + Required. Resource name of the ``Service``. 
The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Service: + A Service is a discrete, autonomous, and network-accessible unit, designed + to solve an individual concern + ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). + In Cloud Monitoring, a Service acts as the root + resource under which operational aspects of the + service are accessible. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.GetServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_service, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_services(self, + request: service_service.ListServicesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServicesAsyncPager: + r"""List ``Service``\ s for this workspace. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListServicesRequest`): + The request object. The `ListServices` request. + parent (:class:`str`): + Required. Resource name of the parent containing the + listed services, either a + `project `__ + or a Monitoring Workspace. The formats are: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + workspaces/[HOST_PROJECT_ID_OR_NUMBER] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.service_monitoring_service.pagers.ListServicesAsyncPager: + The ListServices response. + + Iterating over this object will yield results and + resolve additional pages automatically. 
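The per-call ``retry`` and ``timeout`` parameters documented above override the generated defaults; a sketch with illustrative values, mirroring the default backoff shape but extending the overall deadline:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    custom_retry = retries.Retry(
        initial=0.1,
        maximum=30.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=120.0,
    )

    async def fetch_service(client, name="projects/my-project/services/my-service"):
        # Per-call overrides take precedence over the wrapped defaults.
        return await client.get_service(name=name, retry=custom_retry, timeout=60.0)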
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.ListServicesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_services, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListServicesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_service(self, + request: service_service.UpdateServiceRequest = None, + *, + service: gm_service.Service = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gm_service.Service: + r"""Update this ``Service``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.UpdateServiceRequest`): + The request object. The `UpdateService` request. + service (:class:`google.cloud.monitoring_v3.types.Service`): + Required. The ``Service`` to draw updates from. The + given ``name`` specifies the resource to update. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Service: + A Service is a discrete, autonomous, and network-accessible unit, designed + to solve an individual concern + ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). + In Cloud Monitoring, a Service acts as the root + resource under which operational aspects of the + service are accessible. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.UpdateServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if service is not None: + request.service = service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_service, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service.name", request.service.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_service(self, + request: service_service.DeleteServiceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Soft delete this ``Service``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.DeleteServiceRequest`): + The request object. The `DeleteService` request. + name (:class:`str`): + Required. Resource name of the ``Service`` to delete. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.DeleteServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_service, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_service_level_objective(self, + request: service_service.CreateServiceLevelObjectiveRequest = None, + *, + parent: str = None, + service_level_objective: service.ServiceLevelObjective = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ServiceLevelObjective: + r"""Create a ``ServiceLevelObjective`` for the given ``Service``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.CreateServiceLevelObjectiveRequest`): + The request object. 
The `CreateServiceLevelObjective` + request. + parent (:class:`str`): + Required. Resource name of the parent ``Service``. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_level_objective (:class:`google.cloud.monitoring_v3.types.ServiceLevelObjective`): + Required. The ``ServiceLevelObjective`` to create. The + provided ``name`` will be respected if no + ``ServiceLevelObjective`` exists with this name. + + This corresponds to the ``service_level_objective`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.ServiceLevelObjective: + A Service-Level Objective (SLO) + describes a level of desired good + service. It consists of a service-level + indicator (SLI), a performance goal, and + a period over which the objective is to + be evaluated against that goal. The SLO + can use SLIs defined in a number of + different manners. Typical SLOs might + include "99% of requests in each rolling + week have latency below 200 + milliseconds" or "99.5% of requests in + each calendar month return + successfully." + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, service_level_objective]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.CreateServiceLevelObjectiveRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if service_level_objective is not None: + request.service_level_objective = service_level_objective + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_service_level_objective, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_service_level_objective(self, + request: service_service.GetServiceLevelObjectiveRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ServiceLevelObjective: + r"""Get a ``ServiceLevelObjective`` by name. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetServiceLevelObjectiveRequest`): + The request object. The `GetServiceLevelObjective` + request. + name (:class:`str`): + Required. Resource name of the ``ServiceLevelObjective`` + to get. 
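A rough sketch of creating an SLO with the flattened arguments described above. The field names follow the ``ServiceLevelObjective`` message in this package's types, and the 28-day rolling window and 99.5% goal are arbitrary example values.

    from google.cloud import monitoring_v3
    from google.protobuf import duration_pb2

    async def create_slo(client):
        slo = monitoring_v3.ServiceLevelObjective(
            display_name="99.5% of requests succeed over 28 days",
            goal=0.995,
            rolling_period=duration_pb2.Duration(seconds=28 * 24 * 3600),
        )
        return await client.create_service_level_objective(
            parent="projects/my-project/services/my-service",
            service_level_objective=slo,
        )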
The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.ServiceLevelObjective: + A Service-Level Objective (SLO) + describes a level of desired good + service. It consists of a service-level + indicator (SLI), a performance goal, and + a period over which the objective is to + be evaluated against that goal. The SLO + can use SLIs defined in a number of + different manners. Typical SLOs might + include "99% of requests in each rolling + week have latency below 200 + milliseconds" or "99.5% of requests in + each calendar month return + successfully." + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.GetServiceLevelObjectiveRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_service_level_objective, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_service_level_objectives(self, + request: service_service.ListServiceLevelObjectivesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServiceLevelObjectivesAsyncPager: + r"""List the ``ServiceLevelObjective``\ s for the given ``Service``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest`): + The request object. The `ListServiceLevelObjectives` + request. + parent (:class:`str`): + Required. Resource name of the parent containing the + listed SLOs, either a project or a Monitoring Workspace. + The formats are: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + workspaces/[HOST_PROJECT_ID_OR_NUMBER]/services/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.service_monitoring_service.pagers.ListServiceLevelObjectivesAsyncPager: + The ListServiceLevelObjectives response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.ListServiceLevelObjectivesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_service_level_objectives, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListServiceLevelObjectivesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_service_level_objective(self, + request: service_service.UpdateServiceLevelObjectiveRequest = None, + *, + service_level_objective: service.ServiceLevelObjective = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ServiceLevelObjective: + r"""Update the given ``ServiceLevelObjective``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.UpdateServiceLevelObjectiveRequest`): + The request object. The `UpdateServiceLevelObjective` + request. + service_level_objective (:class:`google.cloud.monitoring_v3.types.ServiceLevelObjective`): + Required. The ``ServiceLevelObjective`` to draw updates + from. The given ``name`` specifies the resource to + update. + + This corresponds to the ``service_level_objective`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.ServiceLevelObjective: + A Service-Level Objective (SLO) + describes a level of desired good + service. It consists of a service-level + indicator (SLI), a performance goal, and + a period over which the objective is to + be evaluated against that goal. 
The SLO + can use SLIs defined in a number of + different manners. Typical SLOs might + include "99% of requests in each rolling + week have latency below 200 + milliseconds" or "99.5% of requests in + each calendar month return + successfully." + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_level_objective]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.UpdateServiceLevelObjectiveRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_level_objective is not None: + request.service_level_objective = service_level_objective + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_service_level_objective, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_level_objective.name", request.service_level_objective.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_service_level_objective(self, + request: service_service.DeleteServiceLevelObjectiveRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete the given ``ServiceLevelObjective``. + + Args: + request (:class:`google.cloud.monitoring_v3.types.DeleteServiceLevelObjectiveRequest`): + The request object. The `DeleteServiceLevelObjective` + request. + name (:class:`str`): + Required. Resource name of the ``ServiceLevelObjective`` + to delete. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = service_service.DeleteServiceLevelObjectiveRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_service_level_objective, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "ServiceMonitoringServiceAsyncClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/client.py new file mode 100644 index 00000000..59b72d13 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/client.py @@ -0,0 +1,1225 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.services.service_monitoring_service import pagers +from google.cloud.monitoring_v3.types import service +from google.cloud.monitoring_v3.types import service as gm_service +from google.cloud.monitoring_v3.types import service_service +from google.protobuf import duration_pb2 # type: ignore +from google.type import calendar_period_pb2 # type: ignore +from .transports.base import ServiceMonitoringServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ServiceMonitoringServiceGrpcTransport +from .transports.grpc_asyncio import ServiceMonitoringServiceGrpcAsyncIOTransport + + +class ServiceMonitoringServiceClientMeta(type): + """Metaclass for the ServiceMonitoringService client. 
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[ServiceMonitoringServiceTransport]]
+    _transport_registry["grpc"] = ServiceMonitoringServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = ServiceMonitoringServiceGrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: str = None,
+        ) -> Type[ServiceMonitoringServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ServiceMonitoringServiceClient(metaclass=ServiceMonitoringServiceClientMeta):
+    """The Cloud Monitoring Service-Oriented Monitoring API has endpoints
+    for managing and querying aspects of a workspace's services. These
+    include the ``Service``'s monitored resources, its Service-Level
+    Objectives, and a taxonomy of categorized Health Metrics.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "monitoring.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ServiceMonitoringServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ServiceMonitoringServiceClient: The constructed client.
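To make the endpoint-rewriting behaviour of ``_get_default_mtls_endpoint`` concrete, a few input/output pairs derived from the regex and replacements in the method (calling a private helper here purely for illustration):

    from google.cloud import monitoring_v3

    cls = monitoring_v3.ServiceMonitoringServiceClient
    assert (cls._get_default_mtls_endpoint("monitoring.googleapis.com")
            == "monitoring.mtls.googleapis.com")
    assert (cls._get_default_mtls_endpoint("monitoring.sandbox.googleapis.com")
            == "monitoring.mtls.sandbox.googleapis.com")
    # Already-mTLS or non-googleapis hosts are returned unchanged.
    assert (cls._get_default_mtls_endpoint("monitoring.mtls.googleapis.com")
            == "monitoring.mtls.googleapis.com")
    assert cls._get_default_mtls_endpoint("example.com") == "example.com"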
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ServiceMonitoringServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceMonitoringServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def service_path(project: str,service: str,) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/services/{service}".format(project=project, service=service, ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str,str]: + """Parses a service path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/services/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def service_level_objective_path(project: str,service: str,service_level_objective: str,) -> str: + """Returns a fully-qualified service_level_objective string.""" + return "projects/{project}/services/{service}/serviceLevelObjectives/{service_level_objective}".format(project=project, service=service, service_level_objective=service_level_objective, ) + + @staticmethod + def parse_service_level_objective_path(path: str) -> Dict[str,str]: + """Parses a service_level_objective path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/services/(?P.+?)/serviceLevelObjectives/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) 
-> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ServiceMonitoringServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service monitoring service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ServiceMonitoringServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ServiceMonitoringServiceTransport): + # transport is a ServiceMonitoringServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def create_service(self, + request: service_service.CreateServiceRequest = None, + *, + parent: str = None, + service: gm_service.Service = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gm_service.Service: + r"""Create a ``Service``. + + Args: + request (google.cloud.monitoring_v3.types.CreateServiceRequest): + The request object. The `CreateService` request. + parent (str): + Required. Resource + `name `__ + of the parent workspace. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service (google.cloud.monitoring_v3.types.Service): + Required. The ``Service`` to create. + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Service: + A Service is a discrete, autonomous, and network-accessible unit, designed + to solve an individual concern + ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). + In Cloud Monitoring, a Service acts as the root + resource under which operational aspects of the + service are accessible. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
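A sketch of how the endpoint-selection rules above are typically exercised from calling code; the values are placeholders, and an explicit ``api_endpoint`` always takes precedence over the environment variable.

    import os
    from google.cloud import monitoring_v3

    # Force the regular endpoint regardless of any detected client certificate.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"

    # Or pin an explicit endpoint via client_options; a plain dict is accepted
    # and converted with client_options_lib.from_dict() as shown above.
    client = monitoring_v3.ServiceMonitoringServiceClient(
        client_options={"api_endpoint": "monitoring.googleapis.com"},
    )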
+ has_flattened_params = any([parent, service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.CreateServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service_service.CreateServiceRequest): + request = service_service.CreateServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if service is not None: + request.service = service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_service(self, + request: service_service.GetServiceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Service: + r"""Get the named ``Service``. + + Args: + request (google.cloud.monitoring_v3.types.GetServiceRequest): + The request object. The `GetService` request. + name (str): + Required. Resource name of the ``Service``. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Service: + A Service is a discrete, autonomous, and network-accessible unit, designed + to solve an individual concern + ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). + In Cloud Monitoring, a Service acts as the root + resource under which operational aspects of the + service are accessible. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.GetServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service_service.GetServiceRequest): + request = service_service.GetServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
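The mutual-exclusion check above means each generated method accepts either a fully formed request object or the flattened fields, never both. A sketch of the two calling conventions for ``create_service``, with placeholder names and no RPC actually sent:

```python
# Sketch: the two equivalent ways to call create_service. The project and
# service values are placeholders; the actual calls are shown as comments.
from google.cloud import monitoring_v3

parent = "projects/my-project"  # placeholder
svc = monitoring_v3.Service(
    display_name="checkout",
    custom=monitoring_v3.Service.Custom(),
)

# Flattened arguments:
#     client.create_service(parent=parent, service=svc)
# Or a single request object:
request = monitoring_v3.CreateServiceRequest(parent=parent, service=svc)
#     client.create_service(request=request)
# Passing both `request` and flattened fields raises the ValueError above.
```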
+ rpc = self._transport._wrapped_methods[self._transport.get_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_services(self, + request: service_service.ListServicesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServicesPager: + r"""List ``Service``\ s for this workspace. + + Args: + request (google.cloud.monitoring_v3.types.ListServicesRequest): + The request object. The `ListServices` request. + parent (str): + Required. Resource name of the parent containing the + listed services, either a + `project `__ + or a Monitoring Workspace. The formats are: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + workspaces/[HOST_PROJECT_ID_OR_NUMBER] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.service_monitoring_service.pagers.ListServicesPager: + The ListServices response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.ListServicesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service_service.ListServicesRequest): + request = service_service.ListServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_services] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListServicesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_service(self, + request: service_service.UpdateServiceRequest = None, + *, + service: gm_service.Service = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gm_service.Service: + r"""Update this ``Service``. + + Args: + request (google.cloud.monitoring_v3.types.UpdateServiceRequest): + The request object. The `UpdateService` request. + service (google.cloud.monitoring_v3.types.Service): + Required. The ``Service`` to draw updates from. The + given ``name`` specifies the resource to update. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.Service: + A Service is a discrete, autonomous, and network-accessible unit, designed + to solve an individual concern + ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). + In Cloud Monitoring, a Service acts as the root + resource under which operational aspects of the + service are accessible. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.UpdateServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service_service.UpdateServiceRequest): + request = service_service.UpdateServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service is not None: + request.service = service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service.name", request.service.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_service(self, + request: service_service.DeleteServiceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Soft delete this ``Service``. + + Args: + request (google.cloud.monitoring_v3.types.DeleteServiceRequest): + The request object. The `DeleteService` request. + name (str): + Required. Resource name of the ``Service`` to delete. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
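``update_service`` above routes on ``service.name``, so the usual pattern is read-modify-write. A sketch, assuming real credentials and a placeholder resource name:

```python
# Sketch: read-modify-write with get_service/update_service. The resource
# name is a placeholder and the calls need real credentials to succeed.
from google.cloud import monitoring_v3

def rename_service(
    client: monitoring_v3.ServiceMonitoringServiceClient,
    name: str,                  # e.g. "projects/my-project/services/my-service"
    new_display_name: str,
) -> monitoring_v3.Service:
    svc = client.get_service(name=name)
    svc.display_name = new_display_name
    # `service.name` identifies the resource; it also feeds the routing header.
    return client.update_service(service=svc)
```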
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.DeleteServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service_service.DeleteServiceRequest): + request = service_service.DeleteServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_service_level_objective(self, + request: service_service.CreateServiceLevelObjectiveRequest = None, + *, + parent: str = None, + service_level_objective: service.ServiceLevelObjective = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ServiceLevelObjective: + r"""Create a ``ServiceLevelObjective`` for the given ``Service``. + + Args: + request (google.cloud.monitoring_v3.types.CreateServiceLevelObjectiveRequest): + The request object. The `CreateServiceLevelObjective` + request. + parent (str): + Required. Resource name of the parent ``Service``. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_level_objective (google.cloud.monitoring_v3.types.ServiceLevelObjective): + Required. The ``ServiceLevelObjective`` to create. The + provided ``name`` will be respected if no + ``ServiceLevelObjective`` exists with this name. + + This corresponds to the ``service_level_objective`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.ServiceLevelObjective: + A Service-Level Objective (SLO) + describes a level of desired good + service. It consists of a service-level + indicator (SLI), a performance goal, and + a period over which the objective is to + be evaluated against that goal. The SLO + can use SLIs defined in a number of + different manners. 
Typical SLOs might + include "99% of requests in each rolling + week have latency below 200 + milliseconds" or "99.5% of requests in + each calendar month return + successfully." + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, service_level_objective]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.CreateServiceLevelObjectiveRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service_service.CreateServiceLevelObjectiveRequest): + request = service_service.CreateServiceLevelObjectiveRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if service_level_objective is not None: + request.service_level_objective = service_level_objective + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_service_level_objective] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_service_level_objective(self, + request: service_service.GetServiceLevelObjectiveRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ServiceLevelObjective: + r"""Get a ``ServiceLevelObjective`` by name. + + Args: + request (google.cloud.monitoring_v3.types.GetServiceLevelObjectiveRequest): + The request object. The `GetServiceLevelObjective` + request. + name (str): + Required. Resource name of the ``ServiceLevelObjective`` + to get. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.ServiceLevelObjective: + A Service-Level Objective (SLO) + describes a level of desired good + service. It consists of a service-level + indicator (SLI), a performance goal, and + a period over which the objective is to + be evaluated against that goal. The SLO + can use SLIs defined in a number of + different manners. Typical SLOs might + include "99% of requests in each rolling + week have latency below 200 + milliseconds" or "99.5% of requests in + each calendar month return + successfully." + + """ + # Create or coerce a protobuf request object. 
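The ``ServiceLevelObjective`` described above is an ordinary proto-plus message. A sketch of building one for ``create_service_level_objective``; the availability SLI, 99.9% goal, 28-day window, and parent name are illustrative choices rather than anything mandated by this patch:

```python
# Sketch: assembling an SLO message. The goal, window and names are
# illustrative; the create call is shown as a comment because it needs
# real credentials and an existing parent Service.
from google.protobuf import duration_pb2
from google.cloud import monitoring_v3

slo = monitoring_v3.ServiceLevelObjective(
    display_name="99.9% availability over 28 days",
    goal=0.999,
    rolling_period=duration_pb2.Duration(seconds=28 * 24 * 3600),
    service_level_indicator=monitoring_v3.ServiceLevelIndicator(
        basic_sli=monitoring_v3.BasicSli(
            availability=monitoring_v3.BasicSli.AvailabilityCriteria(),
        ),
    ),
)
# client.create_service_level_objective(
#     parent="projects/my-project/services/my-service",
#     service_level_objective=slo,
# )
```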
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.GetServiceLevelObjectiveRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service_service.GetServiceLevelObjectiveRequest): + request = service_service.GetServiceLevelObjectiveRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_service_level_objective] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_service_level_objectives(self, + request: service_service.ListServiceLevelObjectivesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServiceLevelObjectivesPager: + r"""List the ``ServiceLevelObjective``\ s for the given ``Service``. + + Args: + request (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest): + The request object. The `ListServiceLevelObjectives` + request. + parent (str): + Required. Resource name of the parent containing the + listed SLOs, either a project or a Monitoring Workspace. + The formats are: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + workspaces/[HOST_PROJECT_ID_OR_NUMBER]/services/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.service_monitoring_service.pagers.ListServiceLevelObjectivesPager: + The ListServiceLevelObjectives response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.ListServiceLevelObjectivesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, service_service.ListServiceLevelObjectivesRequest): + request = service_service.ListServiceLevelObjectivesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_service_level_objectives] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListServiceLevelObjectivesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_service_level_objective(self, + request: service_service.UpdateServiceLevelObjectiveRequest = None, + *, + service_level_objective: service.ServiceLevelObjective = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ServiceLevelObjective: + r"""Update the given ``ServiceLevelObjective``. + + Args: + request (google.cloud.monitoring_v3.types.UpdateServiceLevelObjectiveRequest): + The request object. The `UpdateServiceLevelObjective` + request. + service_level_objective (google.cloud.monitoring_v3.types.ServiceLevelObjective): + Required. The ``ServiceLevelObjective`` to draw updates + from. The given ``name`` specifies the resource to + update. + + This corresponds to the ``service_level_objective`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.ServiceLevelObjective: + A Service-Level Objective (SLO) + describes a level of desired good + service. It consists of a service-level + indicator (SLI), a performance goal, and + a period over which the objective is to + be evaluated against that goal. The SLO + can use SLIs defined in a number of + different manners. Typical SLOs might + include "99% of requests in each rolling + week have latency below 200 + milliseconds" or "99.5% of requests in + each calendar month return + successfully." + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_level_objective]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.UpdateServiceLevelObjectiveRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, service_service.UpdateServiceLevelObjectiveRequest): + request = service_service.UpdateServiceLevelObjectiveRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_level_objective is not None: + request.service_level_objective = service_level_objective + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_service_level_objective] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_level_objective.name", request.service_level_objective.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_service_level_objective(self, + request: service_service.DeleteServiceLevelObjectiveRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete the given ``ServiceLevelObjective``. + + Args: + request (google.cloud.monitoring_v3.types.DeleteServiceLevelObjectiveRequest): + The request object. The `DeleteServiceLevelObjective` + request. + name (str): + Required. Resource name of the ``ServiceLevelObjective`` + to delete. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a service_service.DeleteServiceLevelObjectiveRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service_service.DeleteServiceLevelObjectiveRequest): + request = service_service.DeleteServiceLevelObjectiveRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_service_level_objective] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
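Each method above folds the request's resource name into an ``x-goog-request-params`` header via ``gapic_v1.routing_header``. A standalone sketch of what that helper emits, with a placeholder resource name:

```python
# Sketch: the routing header the wrapped calls attach to outgoing metadata.
from google.api_core import gapic_v1

metadata = gapic_v1.routing_header.to_grpc_metadata(
    (("name", "projects/my-project/services/my-service"),)
)
print(metadata)
# Prints something like:
# ('x-goog-request-params', 'name=projects%2Fmy-project%2Fservices%2Fmy-service')
```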
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "ServiceMonitoringServiceClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/pagers.py new file mode 100644 index 00000000..e9c457ff --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/pagers.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.monitoring_v3.types import service +from google.cloud.monitoring_v3.types import service_service + + +class ListServicesPager: + """A pager for iterating through ``list_services`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListServicesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``services`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListServices`` requests and continue to iterate + through the ``services`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListServicesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service_service.ListServicesResponse], + request: service_service.ListServicesRequest, + response: service_service.ListServicesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListServicesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListServicesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service_service.ListServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[service_service.ListServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[service.Service]: + for page in self.pages: + yield from page.services + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListServicesAsyncPager: + """A pager for iterating through ``list_services`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListServicesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``services`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListServices`` requests and continue to iterate + through the ``services`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListServicesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service_service.ListServicesResponse]], + request: service_service.ListServicesRequest, + response: service_service.ListServicesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListServicesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListServicesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service_service.ListServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[service_service.ListServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[service.Service]: + async def async_generator(): + async for page in self.pages: + for response in page.services: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListServiceLevelObjectivesPager: + """A pager for iterating through ``list_service_level_objectives`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``service_level_objectives`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListServiceLevelObjectives`` requests and continue to iterate + through the ``service_level_objectives`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service_service.ListServiceLevelObjectivesResponse], + request: service_service.ListServiceLevelObjectivesRequest, + response: service_service.ListServiceLevelObjectivesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service_service.ListServiceLevelObjectivesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[service_service.ListServiceLevelObjectivesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[service.ServiceLevelObjective]: + for page in self.pages: + yield from page.service_level_objectives + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListServiceLevelObjectivesAsyncPager: + """A pager for iterating through ``list_service_level_objectives`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``service_level_objectives`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListServiceLevelObjectives`` requests and continue to iterate + through the ``service_level_objectives`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service_service.ListServiceLevelObjectivesResponse]], + request: service_service.ListServiceLevelObjectivesRequest, + response: service_service.ListServiceLevelObjectivesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service_service.ListServiceLevelObjectivesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[service_service.ListServiceLevelObjectivesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[service.ServiceLevelObjective]: + async def async_generator(): + async for page in self.pages: + for response in page.service_level_objectives: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/__init__.py new file mode 100644 index 00000000..c51e1a3a --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ServiceMonitoringServiceTransport +from .grpc import ServiceMonitoringServiceGrpcTransport +from .grpc_asyncio import ServiceMonitoringServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ServiceMonitoringServiceTransport]] +_transport_registry['grpc'] = ServiceMonitoringServiceGrpcTransport +_transport_registry['grpc_asyncio'] = ServiceMonitoringServiceGrpcAsyncIOTransport + +__all__ = ( + 'ServiceMonitoringServiceTransport', + 'ServiceMonitoringServiceGrpcTransport', + 'ServiceMonitoringServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py new file mode 100644 index 00000000..7811feed --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py @@ -0,0 +1,335 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
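The transport registry above backs the client's string ``transport`` argument; the same mapping is reachable through the client's ``get_transport_class`` helper. A short sketch:

```python
# Sketch: resolving the registered transport labels to classes.
from google.cloud.monitoring_v3 import ServiceMonitoringServiceClient

print(ServiceMonitoringServiceClient.get_transport_class("grpc").__name__)
# ServiceMonitoringServiceGrpcTransport
print(ServiceMonitoringServiceClient.get_transport_class("grpc_asyncio").__name__)
# ServiceMonitoringServiceGrpcAsyncIOTransport
```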
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.types import service +from google.cloud.monitoring_v3.types import service as gm_service +from google.cloud.monitoring_v3.types import service_service +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-monitoring', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class ServiceMonitoringServiceTransport(abc.ABC): + """Abstract transport class for ServiceMonitoringService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ) + + DEFAULT_HOST: str = 'monitoring.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
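The wrapped-method table that follows installs default retry and timeout policies per RPC; callers can still override them on a single call. A sketch mirroring the ``ServiceUnavailable`` retry configured below, with a placeholder resource name and credentials assumed:

```python
# Sketch: overriding the default retry/timeout on one call. The Retry values
# mirror the defaults wired up in _prep_wrapped_messages below.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import monitoring_v3

client = monitoring_v3.ServiceMonitoringServiceClient()
custom_retry = retries.Retry(
    initial=0.1,
    maximum=30.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    deadline=30.0,
)
svc = client.get_service(
    name="projects/my-project/services/my-service",  # placeholder
    retry=custom_retry,
    timeout=30.0,
)
```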
+ self._wrapped_methods = { + self.create_service: gapic_v1.method.wrap_method( + self.create_service, + default_timeout=30.0, + client_info=client_info, + ), + self.get_service: gapic_v1.method.wrap_method( + self.get_service, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_services: gapic_v1.method.wrap_method( + self.list_services, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.update_service: gapic_v1.method.wrap_method( + self.update_service, + default_timeout=30.0, + client_info=client_info, + ), + self.delete_service: gapic_v1.method.wrap_method( + self.delete_service, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.create_service_level_objective: gapic_v1.method.wrap_method( + self.create_service_level_objective, + default_timeout=30.0, + client_info=client_info, + ), + self.get_service_level_objective: gapic_v1.method.wrap_method( + self.get_service_level_objective, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_service_level_objectives: gapic_v1.method.wrap_method( + self.list_service_level_objectives, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.update_service_level_objective: gapic_v1.method.wrap_method( + self.update_service_level_objective, + default_timeout=30.0, + client_info=client_info, + ), + self.delete_service_level_objective: gapic_v1.method.wrap_method( + self.delete_service_level_objective, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + } + + @property + def create_service(self) -> Callable[ + [service_service.CreateServiceRequest], + Union[ + gm_service.Service, + Awaitable[gm_service.Service] + ]]: + raise NotImplementedError() + + @property + def get_service(self) -> Callable[ + [service_service.GetServiceRequest], + Union[ + service.Service, + Awaitable[service.Service] + ]]: + raise NotImplementedError() + + @property + def list_services(self) -> Callable[ + [service_service.ListServicesRequest], + Union[ + service_service.ListServicesResponse, + Awaitable[service_service.ListServicesResponse] + ]]: + raise NotImplementedError() + + @property + def update_service(self) -> Callable[ + [service_service.UpdateServiceRequest], + Union[ + gm_service.Service, + Awaitable[gm_service.Service] + ]]: + raise NotImplementedError() + + @property + def delete_service(self) -> Callable[ + [service_service.DeleteServiceRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def 
create_service_level_objective(self) -> Callable[ + [service_service.CreateServiceLevelObjectiveRequest], + Union[ + service.ServiceLevelObjective, + Awaitable[service.ServiceLevelObjective] + ]]: + raise NotImplementedError() + + @property + def get_service_level_objective(self) -> Callable[ + [service_service.GetServiceLevelObjectiveRequest], + Union[ + service.ServiceLevelObjective, + Awaitable[service.ServiceLevelObjective] + ]]: + raise NotImplementedError() + + @property + def list_service_level_objectives(self) -> Callable[ + [service_service.ListServiceLevelObjectivesRequest], + Union[ + service_service.ListServiceLevelObjectivesResponse, + Awaitable[service_service.ListServiceLevelObjectivesResponse] + ]]: + raise NotImplementedError() + + @property + def update_service_level_objective(self) -> Callable[ + [service_service.UpdateServiceLevelObjectiveRequest], + Union[ + service.ServiceLevelObjective, + Awaitable[service.ServiceLevelObjective] + ]]: + raise NotImplementedError() + + @property + def delete_service_level_objective(self) -> Callable[ + [service_service.DeleteServiceLevelObjectiveRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'ServiceMonitoringServiceTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py new file mode 100644 index 00000000..2a98e523 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py @@ -0,0 +1,492 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.monitoring_v3.types import service +from google.cloud.monitoring_v3.types import service as gm_service +from google.cloud.monitoring_v3.types import service_service +from google.protobuf import empty_pb2 # type: ignore +from .base import ServiceMonitoringServiceTransport, DEFAULT_CLIENT_INFO + + +class ServiceMonitoringServiceGrpcTransport(ServiceMonitoringServiceTransport): + """gRPC backend transport for ServiceMonitoringService. + + The Cloud Monitoring Service-Oriented Monitoring API has endpoints + for managing and querying aspects of a workspace's services. These + include the ``Service``'s monitored resources, its Service-Level + Objectives, and a taxonomy of categorized Health Metrics. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_service(self) -> Callable[ + [service_service.CreateServiceRequest], + gm_service.Service]: + r"""Return a callable for the create service method over gRPC. + + Create a ``Service``. + + Returns: + Callable[[~.CreateServiceRequest], + ~.Service]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_service' not in self._stubs: + self._stubs['create_service'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/CreateService', + request_serializer=service_service.CreateServiceRequest.serialize, + response_deserializer=gm_service.Service.deserialize, + ) + return self._stubs['create_service'] + + @property + def get_service(self) -> Callable[ + [service_service.GetServiceRequest], + service.Service]: + r"""Return a callable for the get service method over gRPC. + + Get the named ``Service``. + + Returns: + Callable[[~.GetServiceRequest], + ~.Service]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service' not in self._stubs: + self._stubs['get_service'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/GetService', + request_serializer=service_service.GetServiceRequest.serialize, + response_deserializer=service.Service.deserialize, + ) + return self._stubs['get_service'] + + @property + def list_services(self) -> Callable[ + [service_service.ListServicesRequest], + service_service.ListServicesResponse]: + r"""Return a callable for the list services method over gRPC. + + List ``Service``\ s for this workspace. + + Returns: + Callable[[~.ListServicesRequest], + ~.ListServicesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_services' not in self._stubs: + self._stubs['list_services'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/ListServices', + request_serializer=service_service.ListServicesRequest.serialize, + response_deserializer=service_service.ListServicesResponse.deserialize, + ) + return self._stubs['list_services'] + + @property + def update_service(self) -> Callable[ + [service_service.UpdateServiceRequest], + gm_service.Service]: + r"""Return a callable for the update service method over gRPC. + + Update this ``Service``. + + Returns: + Callable[[~.UpdateServiceRequest], + ~.Service]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_service' not in self._stubs: + self._stubs['update_service'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/UpdateService', + request_serializer=service_service.UpdateServiceRequest.serialize, + response_deserializer=gm_service.Service.deserialize, + ) + return self._stubs['update_service'] + + @property + def delete_service(self) -> Callable[ + [service_service.DeleteServiceRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete service method over gRPC. + + Soft delete this ``Service``. + + Returns: + Callable[[~.DeleteServiceRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_service' not in self._stubs: + self._stubs['delete_service'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/DeleteService', + request_serializer=service_service.DeleteServiceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_service'] + + @property + def create_service_level_objective(self) -> Callable[ + [service_service.CreateServiceLevelObjectiveRequest], + service.ServiceLevelObjective]: + r"""Return a callable for the create service level objective method over gRPC. + + Create a ``ServiceLevelObjective`` for the given ``Service``. + + Returns: + Callable[[~.CreateServiceLevelObjectiveRequest], + ~.ServiceLevelObjective]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_service_level_objective' not in self._stubs: + self._stubs['create_service_level_objective'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/CreateServiceLevelObjective', + request_serializer=service_service.CreateServiceLevelObjectiveRequest.serialize, + response_deserializer=service.ServiceLevelObjective.deserialize, + ) + return self._stubs['create_service_level_objective'] + + @property + def get_service_level_objective(self) -> Callable[ + [service_service.GetServiceLevelObjectiveRequest], + service.ServiceLevelObjective]: + r"""Return a callable for the get service level objective method over gRPC. + + Get a ``ServiceLevelObjective`` by name. + + Returns: + Callable[[~.GetServiceLevelObjectiveRequest], + ~.ServiceLevelObjective]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_service_level_objective' not in self._stubs: + self._stubs['get_service_level_objective'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/GetServiceLevelObjective', + request_serializer=service_service.GetServiceLevelObjectiveRequest.serialize, + response_deserializer=service.ServiceLevelObjective.deserialize, + ) + return self._stubs['get_service_level_objective'] + + @property + def list_service_level_objectives(self) -> Callable[ + [service_service.ListServiceLevelObjectivesRequest], + service_service.ListServiceLevelObjectivesResponse]: + r"""Return a callable for the list service level objectives method over gRPC. + + List the ``ServiceLevelObjective``\ s for the given ``Service``. + + Returns: + Callable[[~.ListServiceLevelObjectivesRequest], + ~.ListServiceLevelObjectivesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_service_level_objectives' not in self._stubs: + self._stubs['list_service_level_objectives'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/ListServiceLevelObjectives', + request_serializer=service_service.ListServiceLevelObjectivesRequest.serialize, + response_deserializer=service_service.ListServiceLevelObjectivesResponse.deserialize, + ) + return self._stubs['list_service_level_objectives'] + + @property + def update_service_level_objective(self) -> Callable[ + [service_service.UpdateServiceLevelObjectiveRequest], + service.ServiceLevelObjective]: + r"""Return a callable for the update service level objective method over gRPC. + + Update the given ``ServiceLevelObjective``. + + Returns: + Callable[[~.UpdateServiceLevelObjectiveRequest], + ~.ServiceLevelObjective]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_service_level_objective' not in self._stubs: + self._stubs['update_service_level_objective'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/UpdateServiceLevelObjective', + request_serializer=service_service.UpdateServiceLevelObjectiveRequest.serialize, + response_deserializer=service.ServiceLevelObjective.deserialize, + ) + return self._stubs['update_service_level_objective'] + + @property + def delete_service_level_objective(self) -> Callable[ + [service_service.DeleteServiceLevelObjectiveRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete service level objective method over gRPC. + + Delete the given ``ServiceLevelObjective``. + + Returns: + Callable[[~.DeleteServiceLevelObjectiveRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_service_level_objective' not in self._stubs: + self._stubs['delete_service_level_objective'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/DeleteServiceLevelObjective', + request_serializer=service_service.DeleteServiceLevelObjectiveRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_service_level_objective'] + + +__all__ = ( + 'ServiceMonitoringServiceGrpcTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..b3811bbc --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py @@ -0,0 +1,496 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.monitoring_v3.types import service +from google.cloud.monitoring_v3.types import service as gm_service +from google.cloud.monitoring_v3.types import service_service +from google.protobuf import empty_pb2 # type: ignore +from .base import ServiceMonitoringServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import ServiceMonitoringServiceGrpcTransport + + +class ServiceMonitoringServiceGrpcAsyncIOTransport(ServiceMonitoringServiceTransport): + """gRPC AsyncIO backend transport for ServiceMonitoringService. + + The Cloud Monitoring Service-Oriented Monitoring API has endpoints + for managing and querying aspects of a workspace's services. These + include the ``Service``'s monitored resources, its Service-Level + Objectives, and a taxonomy of categorized Health Metrics. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. 
It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_service(self) -> Callable[ + [service_service.CreateServiceRequest], + Awaitable[gm_service.Service]]: + r"""Return a callable for the create service method over gRPC. + + Create a ``Service``. + + Returns: + Callable[[~.CreateServiceRequest], + Awaitable[~.Service]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
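+        # (Editor note, descriptive: the stub is created lazily on first access
+        # and cached in ``self._stubs``, so later reads of this property reuse
+        # the same bound RPC; the async client awaits the call it returns.)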
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_service' not in self._stubs: + self._stubs['create_service'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/CreateService', + request_serializer=service_service.CreateServiceRequest.serialize, + response_deserializer=gm_service.Service.deserialize, + ) + return self._stubs['create_service'] + + @property + def get_service(self) -> Callable[ + [service_service.GetServiceRequest], + Awaitable[service.Service]]: + r"""Return a callable for the get service method over gRPC. + + Get the named ``Service``. + + Returns: + Callable[[~.GetServiceRequest], + Awaitable[~.Service]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service' not in self._stubs: + self._stubs['get_service'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/GetService', + request_serializer=service_service.GetServiceRequest.serialize, + response_deserializer=service.Service.deserialize, + ) + return self._stubs['get_service'] + + @property + def list_services(self) -> Callable[ + [service_service.ListServicesRequest], + Awaitable[service_service.ListServicesResponse]]: + r"""Return a callable for the list services method over gRPC. + + List ``Service``\ s for this workspace. + + Returns: + Callable[[~.ListServicesRequest], + Awaitable[~.ListServicesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_services' not in self._stubs: + self._stubs['list_services'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/ListServices', + request_serializer=service_service.ListServicesRequest.serialize, + response_deserializer=service_service.ListServicesResponse.deserialize, + ) + return self._stubs['list_services'] + + @property + def update_service(self) -> Callable[ + [service_service.UpdateServiceRequest], + Awaitable[gm_service.Service]]: + r"""Return a callable for the update service method over gRPC. + + Update this ``Service``. + + Returns: + Callable[[~.UpdateServiceRequest], + Awaitable[~.Service]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_service' not in self._stubs: + self._stubs['update_service'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/UpdateService', + request_serializer=service_service.UpdateServiceRequest.serialize, + response_deserializer=gm_service.Service.deserialize, + ) + return self._stubs['update_service'] + + @property + def delete_service(self) -> Callable[ + [service_service.DeleteServiceRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete service method over gRPC. + + Soft delete this ``Service``. 
+ + Returns: + Callable[[~.DeleteServiceRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_service' not in self._stubs: + self._stubs['delete_service'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/DeleteService', + request_serializer=service_service.DeleteServiceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_service'] + + @property + def create_service_level_objective(self) -> Callable[ + [service_service.CreateServiceLevelObjectiveRequest], + Awaitable[service.ServiceLevelObjective]]: + r"""Return a callable for the create service level objective method over gRPC. + + Create a ``ServiceLevelObjective`` for the given ``Service``. + + Returns: + Callable[[~.CreateServiceLevelObjectiveRequest], + Awaitable[~.ServiceLevelObjective]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_service_level_objective' not in self._stubs: + self._stubs['create_service_level_objective'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/CreateServiceLevelObjective', + request_serializer=service_service.CreateServiceLevelObjectiveRequest.serialize, + response_deserializer=service.ServiceLevelObjective.deserialize, + ) + return self._stubs['create_service_level_objective'] + + @property + def get_service_level_objective(self) -> Callable[ + [service_service.GetServiceLevelObjectiveRequest], + Awaitable[service.ServiceLevelObjective]]: + r"""Return a callable for the get service level objective method over gRPC. + + Get a ``ServiceLevelObjective`` by name. + + Returns: + Callable[[~.GetServiceLevelObjectiveRequest], + Awaitable[~.ServiceLevelObjective]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service_level_objective' not in self._stubs: + self._stubs['get_service_level_objective'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/GetServiceLevelObjective', + request_serializer=service_service.GetServiceLevelObjectiveRequest.serialize, + response_deserializer=service.ServiceLevelObjective.deserialize, + ) + return self._stubs['get_service_level_objective'] + + @property + def list_service_level_objectives(self) -> Callable[ + [service_service.ListServiceLevelObjectivesRequest], + Awaitable[service_service.ListServiceLevelObjectivesResponse]]: + r"""Return a callable for the list service level objectives method over gRPC. + + List the ``ServiceLevelObjective``\ s for the given ``Service``. + + Returns: + Callable[[~.ListServiceLevelObjectivesRequest], + Awaitable[~.ListServiceLevelObjectivesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_service_level_objectives' not in self._stubs: + self._stubs['list_service_level_objectives'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/ListServiceLevelObjectives', + request_serializer=service_service.ListServiceLevelObjectivesRequest.serialize, + response_deserializer=service_service.ListServiceLevelObjectivesResponse.deserialize, + ) + return self._stubs['list_service_level_objectives'] + + @property + def update_service_level_objective(self) -> Callable[ + [service_service.UpdateServiceLevelObjectiveRequest], + Awaitable[service.ServiceLevelObjective]]: + r"""Return a callable for the update service level objective method over gRPC. + + Update the given ``ServiceLevelObjective``. + + Returns: + Callable[[~.UpdateServiceLevelObjectiveRequest], + Awaitable[~.ServiceLevelObjective]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_service_level_objective' not in self._stubs: + self._stubs['update_service_level_objective'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/UpdateServiceLevelObjective', + request_serializer=service_service.UpdateServiceLevelObjectiveRequest.serialize, + response_deserializer=service.ServiceLevelObjective.deserialize, + ) + return self._stubs['update_service_level_objective'] + + @property + def delete_service_level_objective(self) -> Callable[ + [service_service.DeleteServiceLevelObjectiveRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete service level objective method over gRPC. + + Delete the given ``ServiceLevelObjective``. + + Returns: + Callable[[~.DeleteServiceLevelObjectiveRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_service_level_objective' not in self._stubs: + self._stubs['delete_service_level_objective'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.ServiceMonitoringService/DeleteServiceLevelObjective', + request_serializer=service_service.DeleteServiceLevelObjectiveRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_service_level_objective'] + + +__all__ = ( + 'ServiceMonitoringServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/__init__.py new file mode 100644 index 00000000..fd31924e --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import UptimeCheckServiceClient +from .async_client import UptimeCheckServiceAsyncClient + +__all__ = ( + 'UptimeCheckServiceClient', + 'UptimeCheckServiceAsyncClient', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py new file mode 100644 index 00000000..37f19380 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py @@ -0,0 +1,686 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.services.uptime_check_service import pagers +from google.cloud.monitoring_v3.types import uptime +from google.cloud.monitoring_v3.types import uptime_service +from google.protobuf import duration_pb2 # type: ignore +from .transports.base import UptimeCheckServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import UptimeCheckServiceGrpcAsyncIOTransport +from .client import UptimeCheckServiceClient + + +class UptimeCheckServiceAsyncClient: + """The UptimeCheckService API is used to manage (list, create, delete, + edit) Uptime check configurations in the Stackdriver Monitoring + product. An Uptime check is a piece of configuration that determines + which resources and services to monitor for availability. These + configurations can also be configured interactively by navigating to + the [Cloud Console] (http://console.cloud.google.com), selecting the + appropriate project, clicking on "Monitoring" on the left-hand side + to navigate to Stackdriver, and then clicking on "Uptime". 
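+
+    Example (editor sketch, illustrative only; assumes application default
+    credentials and must be run inside a coroutine)::
+
+        client = UptimeCheckServiceAsyncClient()
+        pager = await client.list_uptime_check_configs(
+            parent="projects/[PROJECT_ID_OR_NUMBER]",
+        )
+        async for config in pager:
+            print(config.name)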
+ """ + + _client: UptimeCheckServiceClient + + DEFAULT_ENDPOINT = UptimeCheckServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = UptimeCheckServiceClient.DEFAULT_MTLS_ENDPOINT + + uptime_check_config_path = staticmethod(UptimeCheckServiceClient.uptime_check_config_path) + parse_uptime_check_config_path = staticmethod(UptimeCheckServiceClient.parse_uptime_check_config_path) + common_billing_account_path = staticmethod(UptimeCheckServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(UptimeCheckServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(UptimeCheckServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(UptimeCheckServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(UptimeCheckServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(UptimeCheckServiceClient.parse_common_organization_path) + common_project_path = staticmethod(UptimeCheckServiceClient.common_project_path) + parse_common_project_path = staticmethod(UptimeCheckServiceClient.parse_common_project_path) + common_location_path = staticmethod(UptimeCheckServiceClient.common_location_path) + parse_common_location_path = staticmethod(UptimeCheckServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UptimeCheckServiceAsyncClient: The constructed client. + """ + return UptimeCheckServiceClient.from_service_account_info.__func__(UptimeCheckServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UptimeCheckServiceAsyncClient: The constructed client. + """ + return UptimeCheckServiceClient.from_service_account_file.__func__(UptimeCheckServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> UptimeCheckServiceTransport: + """Returns the transport used by the client instance. + + Returns: + UptimeCheckServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(UptimeCheckServiceClient).get_transport_class, type(UptimeCheckServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, UptimeCheckServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the uptime check service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ~.UptimeCheckServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = UptimeCheckServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_uptime_check_configs(self, + request: uptime_service.ListUptimeCheckConfigsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUptimeCheckConfigsAsyncPager: + r"""Lists the existing valid Uptime check configurations + for the project (leaving out any invalid + configurations). + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest`): + The request object. The protocol for the + `ListUptimeCheckConfigs` request. + parent (:class:`str`): + Required. The + `project `__ + whose Uptime check configurations are listed. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.uptime_check_service.pagers.ListUptimeCheckConfigsAsyncPager: + The protocol for the ListUptimeCheckConfigs response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = uptime_service.ListUptimeCheckConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
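+        # Editor note (descriptive): the default policy below retries only
+        # transient ``ServiceUnavailable`` errors, backing off exponentially
+        # from 0.1s (factor 1.3, capped at 30s between attempts) and giving up
+        # once the overall 30s deadline passes.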
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_uptime_check_configs, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListUptimeCheckConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_uptime_check_config(self, + request: uptime_service.GetUptimeCheckConfigRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> uptime.UptimeCheckConfig: + r"""Gets a single Uptime check configuration. + + Args: + request (:class:`google.cloud.monitoring_v3.types.GetUptimeCheckConfigRequest`): + The request object. The protocol for the + `GetUptimeCheckConfig` request. + name (:class:`str`): + Required. The Uptime check configuration to retrieve. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.UptimeCheckConfig: + This message configures which + resources and services to monitor for + availability. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = uptime_service.GetUptimeCheckConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_uptime_check_config, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_uptime_check_config(self, + request: uptime_service.CreateUptimeCheckConfigRequest = None, + *, + parent: str = None, + uptime_check_config: uptime.UptimeCheckConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> uptime.UptimeCheckConfig: + r"""Creates a new Uptime check configuration. + + Args: + request (:class:`google.cloud.monitoring_v3.types.CreateUptimeCheckConfigRequest`): + The request object. The protocol for the + `CreateUptimeCheckConfig` request. + parent (:class:`str`): + Required. The + `project `__ + in which to create the Uptime check. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + uptime_check_config (:class:`google.cloud.monitoring_v3.types.UptimeCheckConfig`): + Required. The new Uptime check + configuration. + + This corresponds to the ``uptime_check_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.UptimeCheckConfig: + This message configures which + resources and services to monitor for + availability. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, uptime_check_config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = uptime_service.CreateUptimeCheckConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if uptime_check_config is not None: + request.uptime_check_config = uptime_check_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_uptime_check_config, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_uptime_check_config(self, + request: uptime_service.UpdateUptimeCheckConfigRequest = None, + *, + uptime_check_config: uptime.UptimeCheckConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> uptime.UptimeCheckConfig: + r"""Updates an Uptime check configuration. You can either replace + the entire configuration with a new one or replace only certain + fields in the current configuration by specifying the fields to + be updated via ``updateMask``. Returns the updated + configuration. 
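+        For example (illustrative): an ``updateMask`` naming only
+        ``display_name`` replaces just the display name and leaves all other
+        stored fields unchanged.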
+ + Args: + request (:class:`google.cloud.monitoring_v3.types.UpdateUptimeCheckConfigRequest`): + The request object. The protocol for the + `UpdateUptimeCheckConfig` request. + uptime_check_config (:class:`google.cloud.monitoring_v3.types.UptimeCheckConfig`): + Required. If an ``updateMask`` has been specified, this + field gives the values for the set of fields mentioned + in the ``updateMask``. If an ``updateMask`` has not been + given, this Uptime check configuration replaces the + current configuration. If a field is mentioned in + ``updateMask`` but the corresonding field is omitted in + this partial Uptime check configuration, it has the + effect of deleting/clearing the field from the + configuration on the server. + + The following fields can be updated: ``display_name``, + ``http_check``, ``tcp_check``, ``timeout``, + ``content_matchers``, and ``selected_regions``. + + This corresponds to the ``uptime_check_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.UptimeCheckConfig: + This message configures which + resources and services to monitor for + availability. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([uptime_check_config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = uptime_service.UpdateUptimeCheckConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if uptime_check_config is not None: + request.uptime_check_config = uptime_check_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_uptime_check_config, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("uptime_check_config.name", request.uptime_check_config.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_uptime_check_config(self, + request: uptime_service.DeleteUptimeCheckConfigRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an Uptime check configuration. Note that this + method will fail if the Uptime check configuration is + referenced by an alert policy or other dependent configs + that would be rendered invalid by the deletion. + + Args: + request (:class:`google.cloud.monitoring_v3.types.DeleteUptimeCheckConfigRequest`): + The request object. The protocol for the + `DeleteUptimeCheckConfig` request. + name (:class:`str`): + Required. The Uptime check configuration to delete. 
The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = uptime_service.DeleteUptimeCheckConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_uptime_check_config, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_uptime_check_ips(self, + request: uptime_service.ListUptimeCheckIpsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUptimeCheckIpsAsyncPager: + r"""Returns the list of IP addresses that checkers run + from + + Args: + request (:class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest`): + The request object. The protocol for the + `ListUptimeCheckIps` request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.uptime_check_service.pagers.ListUptimeCheckIpsAsyncPager: + The protocol for the ListUptimeCheckIps response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + request = uptime_service.ListUptimeCheckIpsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_uptime_check_ips, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListUptimeCheckIpsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "UptimeCheckServiceAsyncClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/client.py new file mode 100644 index 00000000..6c221982 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/client.py @@ -0,0 +1,854 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.services.uptime_check_service import pagers +from google.cloud.monitoring_v3.types import uptime +from google.cloud.monitoring_v3.types import uptime_service +from google.protobuf import duration_pb2 # type: ignore +from .transports.base import UptimeCheckServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import UptimeCheckServiceGrpcTransport +from .transports.grpc_asyncio import UptimeCheckServiceGrpcAsyncIOTransport + + +class UptimeCheckServiceClientMeta(type): + """Metaclass for the UptimeCheckService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
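+
+    For example (illustrative),
+    ``UptimeCheckServiceClient.get_transport_class("grpc")`` resolves to
+    :class:`UptimeCheckServiceGrpcTransport`, while calling it with no label
+    falls back to the first transport registered below.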
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[UptimeCheckServiceTransport]] + _transport_registry["grpc"] = UptimeCheckServiceGrpcTransport + _transport_registry["grpc_asyncio"] = UptimeCheckServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[UptimeCheckServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class UptimeCheckServiceClient(metaclass=UptimeCheckServiceClientMeta): + """The UptimeCheckService API is used to manage (list, create, delete, + edit) Uptime check configurations in the Stackdriver Monitoring + product. An Uptime check is a piece of configuration that determines + which resources and services to monitor for availability. These + configurations can also be configured interactively by navigating to + the [Cloud Console] (http://console.cloud.google.com), selecting the + appropriate project, clicking on "Monitoring" on the left-hand side + to navigate to Stackdriver, and then clicking on "Uptime". + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "monitoring.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UptimeCheckServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UptimeCheckServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> UptimeCheckServiceTransport: + """Returns the transport used by the client instance. + + Returns: + UptimeCheckServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def uptime_check_config_path(project: str,uptime_check_config: str,) -> str: + """Returns a fully-qualified uptime_check_config string.""" + return "projects/{project}/uptimeCheckConfigs/{uptime_check_config}".format(project=project, uptime_check_config=uptime_check_config, ) + + @staticmethod + def parse_uptime_check_config_path(path: str) -> Dict[str,str]: + """Parses a uptime_check_config path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/uptimeCheckConfigs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, UptimeCheckServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the uptime check service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, UptimeCheckServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, UptimeCheckServiceTransport): + # transport is a UptimeCheckServiceTransport instance. 
+ if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_uptime_check_configs(self, + request: uptime_service.ListUptimeCheckConfigsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUptimeCheckConfigsPager: + r"""Lists the existing valid Uptime check configurations + for the project (leaving out any invalid + configurations). + + Args: + request (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest): + The request object. The protocol for the + `ListUptimeCheckConfigs` request. + parent (str): + Required. The + `project `__ + whose Uptime check configurations are listed. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.uptime_check_service.pagers.ListUptimeCheckConfigsPager: + The protocol for the ListUptimeCheckConfigs response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a uptime_service.ListUptimeCheckConfigsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, uptime_service.ListUptimeCheckConfigsRequest): + request = uptime_service.ListUptimeCheckConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_uptime_check_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListUptimeCheckConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_uptime_check_config(self, + request: uptime_service.GetUptimeCheckConfigRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> uptime.UptimeCheckConfig: + r"""Gets a single Uptime check configuration. + + Args: + request (google.cloud.monitoring_v3.types.GetUptimeCheckConfigRequest): + The request object. The protocol for the + `GetUptimeCheckConfig` request. + name (str): + Required. The Uptime check configuration to retrieve. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.UptimeCheckConfig: + This message configures which + resources and services to monitor for + availability. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a uptime_service.GetUptimeCheckConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, uptime_service.GetUptimeCheckConfigRequest): + request = uptime_service.GetUptimeCheckConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_uptime_check_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_uptime_check_config(self, + request: uptime_service.CreateUptimeCheckConfigRequest = None, + *, + parent: str = None, + uptime_check_config: uptime.UptimeCheckConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> uptime.UptimeCheckConfig: + r"""Creates a new Uptime check configuration. + + Args: + request (google.cloud.monitoring_v3.types.CreateUptimeCheckConfigRequest): + The request object. 
The protocol for the + `CreateUptimeCheckConfig` request. + parent (str): + Required. The + `project `__ + in which to create the Uptime check. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + uptime_check_config (google.cloud.monitoring_v3.types.UptimeCheckConfig): + Required. The new Uptime check + configuration. + + This corresponds to the ``uptime_check_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.UptimeCheckConfig: + This message configures which + resources and services to monitor for + availability. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, uptime_check_config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a uptime_service.CreateUptimeCheckConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, uptime_service.CreateUptimeCheckConfigRequest): + request = uptime_service.CreateUptimeCheckConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if uptime_check_config is not None: + request.uptime_check_config = uptime_check_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_uptime_check_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_uptime_check_config(self, + request: uptime_service.UpdateUptimeCheckConfigRequest = None, + *, + uptime_check_config: uptime.UptimeCheckConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> uptime.UptimeCheckConfig: + r"""Updates an Uptime check configuration. You can either replace + the entire configuration with a new one or replace only certain + fields in the current configuration by specifying the fields to + be updated via ``updateMask``. Returns the updated + configuration. + + Args: + request (google.cloud.monitoring_v3.types.UpdateUptimeCheckConfigRequest): + The request object. The protocol for the + `UpdateUptimeCheckConfig` request. + uptime_check_config (google.cloud.monitoring_v3.types.UptimeCheckConfig): + Required. If an ``updateMask`` has been specified, this + field gives the values for the set of fields mentioned + in the ``updateMask``. 
If an ``updateMask`` has not been + given, this Uptime check configuration replaces the + current configuration. If a field is mentioned in + ``updateMask`` but the corresonding field is omitted in + this partial Uptime check configuration, it has the + effect of deleting/clearing the field from the + configuration on the server. + + The following fields can be updated: ``display_name``, + ``http_check``, ``tcp_check``, ``timeout``, + ``content_matchers``, and ``selected_regions``. + + This corresponds to the ``uptime_check_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.types.UptimeCheckConfig: + This message configures which + resources and services to monitor for + availability. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([uptime_check_config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a uptime_service.UpdateUptimeCheckConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, uptime_service.UpdateUptimeCheckConfigRequest): + request = uptime_service.UpdateUptimeCheckConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if uptime_check_config is not None: + request.uptime_check_config = uptime_check_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_uptime_check_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("uptime_check_config.name", request.uptime_check_config.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_uptime_check_config(self, + request: uptime_service.DeleteUptimeCheckConfigRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an Uptime check configuration. Note that this + method will fail if the Uptime check configuration is + referenced by an alert policy or other dependent configs + that would be rendered invalid by the deletion. + + Args: + request (google.cloud.monitoring_v3.types.DeleteUptimeCheckConfigRequest): + The request object. The protocol for the + `DeleteUptimeCheckConfig` request. + name (str): + Required. The Uptime check configuration to delete. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a uptime_service.DeleteUptimeCheckConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, uptime_service.DeleteUptimeCheckConfigRequest): + request = uptime_service.DeleteUptimeCheckConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_uptime_check_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_uptime_check_ips(self, + request: uptime_service.ListUptimeCheckIpsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUptimeCheckIpsPager: + r"""Returns the list of IP addresses that checkers run + from + + Args: + request (google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest): + The request object. The protocol for the + `ListUptimeCheckIps` request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.monitoring_v3.services.uptime_check_service.pagers.ListUptimeCheckIpsPager: + The protocol for the ListUptimeCheckIps response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a uptime_service.ListUptimeCheckIpsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, uptime_service.ListUptimeCheckIpsRequest): + request = uptime_service.ListUptimeCheckIpsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_uptime_check_ips] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
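+        # Iterating the pager yields individual ``UptimeCheckIp`` entries and
+        # fetches follow-up pages transparently; the ``pages`` property exposes
+        # one ``ListUptimeCheckIpsResponse`` at a time for callers that need
+        # the raw responses.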
+ response = pagers.ListUptimeCheckIpsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-monitoring", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "UptimeCheckServiceClient", +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/pagers.py new file mode 100644 index 00000000..59b3cba2 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/pagers.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.monitoring_v3.types import uptime +from google.cloud.monitoring_v3.types import uptime_service + + +class ListUptimeCheckConfigsPager: + """A pager for iterating through ``list_uptime_check_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``uptime_check_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUptimeCheckConfigs`` requests and continue to iterate + through the ``uptime_check_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., uptime_service.ListUptimeCheckConfigsResponse], + request: uptime_service.ListUptimeCheckConfigsRequest, + response: uptime_service.ListUptimeCheckConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = uptime_service.ListUptimeCheckConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[uptime_service.ListUptimeCheckConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[uptime.UptimeCheckConfig]: + for page in self.pages: + yield from page.uptime_check_configs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListUptimeCheckConfigsAsyncPager: + """A pager for iterating through ``list_uptime_check_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``uptime_check_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUptimeCheckConfigs`` requests and continue to iterate + through the ``uptime_check_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[uptime_service.ListUptimeCheckConfigsResponse]], + request: uptime_service.ListUptimeCheckConfigsRequest, + response: uptime_service.ListUptimeCheckConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = uptime_service.ListUptimeCheckConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[uptime_service.ListUptimeCheckConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[uptime.UptimeCheckConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.uptime_check_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListUptimeCheckIpsPager: + """A pager for iterating through ``list_uptime_check_ips`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``uptime_check_ips`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListUptimeCheckIps`` requests and continue to iterate + through the ``uptime_check_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., uptime_service.ListUptimeCheckIpsResponse], + request: uptime_service.ListUptimeCheckIpsRequest, + response: uptime_service.ListUptimeCheckIpsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = uptime_service.ListUptimeCheckIpsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[uptime_service.ListUptimeCheckIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[uptime.UptimeCheckIp]: + for page in self.pages: + yield from page.uptime_check_ips + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListUptimeCheckIpsAsyncPager: + """A pager for iterating through ``list_uptime_check_ips`` requests. + + This class thinly wraps an initial + :class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``uptime_check_ips`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUptimeCheckIps`` requests and continue to iterate + through the ``uptime_check_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[uptime_service.ListUptimeCheckIpsResponse]], + request: uptime_service.ListUptimeCheckIpsRequest, + response: uptime_service.ListUptimeCheckIpsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest): + The initial request object. + response (google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = uptime_service.ListUptimeCheckIpsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[uptime_service.ListUptimeCheckIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[uptime.UptimeCheckIp]: + async def async_generator(): + async for page in self.pages: + for response in page.uptime_check_ips: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/__init__.py new file mode 100644 index 00000000..7f3aae1e --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import UptimeCheckServiceTransport +from .grpc import UptimeCheckServiceGrpcTransport +from .grpc_asyncio import UptimeCheckServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[UptimeCheckServiceTransport]] +_transport_registry['grpc'] = UptimeCheckServiceGrpcTransport +_transport_registry['grpc_asyncio'] = UptimeCheckServiceGrpcAsyncIOTransport + +__all__ = ( + 'UptimeCheckServiceTransport', + 'UptimeCheckServiceGrpcTransport', + 'UptimeCheckServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py new file mode 100644 index 00000000..1d597e1b --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py @@ -0,0 +1,266 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_v3.types import uptime +from google.cloud.monitoring_v3.types import uptime_service +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-monitoring', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class UptimeCheckServiceTransport(abc.ABC): + """Abstract transport class for UptimeCheckService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ) + + DEFAULT_HOST: str = 'monitoring.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
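+        # Passing both ``credentials`` and ``credentials_file`` is rejected;
+        # otherwise a credentials file, when given, is loaded first, an explicit
+        # credentials object is used next, and application default credentials
+        # are resolved only when neither argument is supplied.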
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_uptime_check_configs: gapic_v1.method.wrap_method( + self.list_uptime_check_configs, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.get_uptime_check_config: gapic_v1.method.wrap_method( + self.get_uptime_check_config, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.create_uptime_check_config: gapic_v1.method.wrap_method( + self.create_uptime_check_config, + default_timeout=30.0, + client_info=client_info, + ), + self.update_uptime_check_config: gapic_v1.method.wrap_method( + self.update_uptime_check_config, + default_timeout=30.0, + client_info=client_info, + ), + self.delete_uptime_check_config: gapic_v1.method.wrap_method( + self.delete_uptime_check_config, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_uptime_check_ips: gapic_v1.method.wrap_method( + self.list_uptime_check_ips, + default_retry=retries.Retry( +initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + } + + @property + def list_uptime_check_configs(self) -> Callable[ + [uptime_service.ListUptimeCheckConfigsRequest], + Union[ + uptime_service.ListUptimeCheckConfigsResponse, 
+ Awaitable[uptime_service.ListUptimeCheckConfigsResponse] + ]]: + raise NotImplementedError() + + @property + def get_uptime_check_config(self) -> Callable[ + [uptime_service.GetUptimeCheckConfigRequest], + Union[ + uptime.UptimeCheckConfig, + Awaitable[uptime.UptimeCheckConfig] + ]]: + raise NotImplementedError() + + @property + def create_uptime_check_config(self) -> Callable[ + [uptime_service.CreateUptimeCheckConfigRequest], + Union[ + uptime.UptimeCheckConfig, + Awaitable[uptime.UptimeCheckConfig] + ]]: + raise NotImplementedError() + + @property + def update_uptime_check_config(self) -> Callable[ + [uptime_service.UpdateUptimeCheckConfigRequest], + Union[ + uptime.UptimeCheckConfig, + Awaitable[uptime.UptimeCheckConfig] + ]]: + raise NotImplementedError() + + @property + def delete_uptime_check_config(self) -> Callable[ + [uptime_service.DeleteUptimeCheckConfigRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_uptime_check_ips(self) -> Callable[ + [uptime_service.ListUptimeCheckIpsRequest], + Union[ + uptime_service.ListUptimeCheckIpsResponse, + Awaitable[uptime_service.ListUptimeCheckIpsResponse] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'UptimeCheckServiceTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py new file mode 100644 index 00000000..f7704bae --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py @@ -0,0 +1,401 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.monitoring_v3.types import uptime +from google.cloud.monitoring_v3.types import uptime_service +from google.protobuf import empty_pb2 # type: ignore +from .base import UptimeCheckServiceTransport, DEFAULT_CLIENT_INFO + + +class UptimeCheckServiceGrpcTransport(UptimeCheckServiceTransport): + """gRPC backend transport for UptimeCheckService. + + The UptimeCheckService API is used to manage (list, create, delete, + edit) Uptime check configurations in the Stackdriver Monitoring + product. An Uptime check is a piece of configuration that determines + which resources and services to monitor for availability. 
These + configurations can also be configured interactively by navigating to + the [Cloud Console] (http://console.cloud.google.com), selecting the + appropriate project, clicking on "Monitoring" on the left-hand side + to navigate to Stackdriver, and then clicking on "Uptime". + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_uptime_check_configs(self) -> Callable[ + [uptime_service.ListUptimeCheckConfigsRequest], + uptime_service.ListUptimeCheckConfigsResponse]: + r"""Return a callable for the list uptime check configs method over gRPC. + + Lists the existing valid Uptime check configurations + for the project (leaving out any invalid + configurations). + + Returns: + Callable[[~.ListUptimeCheckConfigsRequest], + ~.ListUptimeCheckConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_uptime_check_configs' not in self._stubs: + self._stubs['list_uptime_check_configs'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/ListUptimeCheckConfigs', + request_serializer=uptime_service.ListUptimeCheckConfigsRequest.serialize, + response_deserializer=uptime_service.ListUptimeCheckConfigsResponse.deserialize, + ) + return self._stubs['list_uptime_check_configs'] + + @property + def get_uptime_check_config(self) -> Callable[ + [uptime_service.GetUptimeCheckConfigRequest], + uptime.UptimeCheckConfig]: + r"""Return a callable for the get uptime check config method over gRPC. + + Gets a single Uptime check configuration. + + Returns: + Callable[[~.GetUptimeCheckConfigRequest], + ~.UptimeCheckConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_uptime_check_config' not in self._stubs: + self._stubs['get_uptime_check_config'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/GetUptimeCheckConfig', + request_serializer=uptime_service.GetUptimeCheckConfigRequest.serialize, + response_deserializer=uptime.UptimeCheckConfig.deserialize, + ) + return self._stubs['get_uptime_check_config'] + + @property + def create_uptime_check_config(self) -> Callable[ + [uptime_service.CreateUptimeCheckConfigRequest], + uptime.UptimeCheckConfig]: + r"""Return a callable for the create uptime check config method over gRPC. + + Creates a new Uptime check configuration. + + Returns: + Callable[[~.CreateUptimeCheckConfigRequest], + ~.UptimeCheckConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_uptime_check_config' not in self._stubs: + self._stubs['create_uptime_check_config'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/CreateUptimeCheckConfig', + request_serializer=uptime_service.CreateUptimeCheckConfigRequest.serialize, + response_deserializer=uptime.UptimeCheckConfig.deserialize, + ) + return self._stubs['create_uptime_check_config'] + + @property + def update_uptime_check_config(self) -> Callable[ + [uptime_service.UpdateUptimeCheckConfigRequest], + uptime.UptimeCheckConfig]: + r"""Return a callable for the update uptime check config method over gRPC. + + Updates an Uptime check configuration. You can either replace + the entire configuration with a new one or replace only certain + fields in the current configuration by specifying the fields to + be updated via ``updateMask``. Returns the updated + configuration. + + Returns: + Callable[[~.UpdateUptimeCheckConfigRequest], + ~.UptimeCheckConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_uptime_check_config' not in self._stubs: + self._stubs['update_uptime_check_config'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/UpdateUptimeCheckConfig', + request_serializer=uptime_service.UpdateUptimeCheckConfigRequest.serialize, + response_deserializer=uptime.UptimeCheckConfig.deserialize, + ) + return self._stubs['update_uptime_check_config'] + + @property + def delete_uptime_check_config(self) -> Callable[ + [uptime_service.DeleteUptimeCheckConfigRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete uptime check config method over gRPC. + + Deletes an Uptime check configuration. Note that this + method will fail if the Uptime check configuration is + referenced by an alert policy or other dependent configs + that would be rendered invalid by the deletion. + + Returns: + Callable[[~.DeleteUptimeCheckConfigRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_uptime_check_config' not in self._stubs: + self._stubs['delete_uptime_check_config'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/DeleteUptimeCheckConfig', + request_serializer=uptime_service.DeleteUptimeCheckConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_uptime_check_config'] + + @property + def list_uptime_check_ips(self) -> Callable[ + [uptime_service.ListUptimeCheckIpsRequest], + uptime_service.ListUptimeCheckIpsResponse]: + r"""Return a callable for the list uptime check ips method over gRPC. + + Returns the list of IP addresses that checkers run + from + + Returns: + Callable[[~.ListUptimeCheckIpsRequest], + ~.ListUptimeCheckIpsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_uptime_check_ips' not in self._stubs: + self._stubs['list_uptime_check_ips'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/ListUptimeCheckIps', + request_serializer=uptime_service.ListUptimeCheckIpsRequest.serialize, + response_deserializer=uptime_service.ListUptimeCheckIpsResponse.deserialize, + ) + return self._stubs['list_uptime_check_ips'] + + +__all__ = ( + 'UptimeCheckServiceGrpcTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..e4b06f19 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py @@ -0,0 +1,405 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.monitoring_v3.types import uptime +from google.cloud.monitoring_v3.types import uptime_service +from google.protobuf import empty_pb2 # type: ignore +from .base import UptimeCheckServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import UptimeCheckServiceGrpcTransport + + +class UptimeCheckServiceGrpcAsyncIOTransport(UptimeCheckServiceTransport): + """gRPC AsyncIO backend transport for UptimeCheckService. + + The UptimeCheckService API is used to manage (list, create, delete, + edit) Uptime check configurations in the Stackdriver Monitoring + product. An Uptime check is a piece of configuration that determines + which resources and services to monitor for availability. These + configurations can also be configured interactively by navigating to + the [Cloud Console] (http://console.cloud.google.com), selecting the + appropriate project, clicking on "Monitoring" on the left-hand side + to navigate to Stackdriver, and then clicking on "Uptime". + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'monitoring.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
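        # A minimal sketch, not part of the generated file: the channel
        # returned here is either one supplied by the caller or one built by
        # ``create_channel`` during ``__init__``. Supplying a prebuilt channel
        # (``my_credentials`` is a placeholder) might look like:
        #
        #     channel = UptimeCheckServiceGrpcAsyncIOTransport.create_channel(
        #         "monitoring.googleapis.com",
        #         credentials=my_credentials,
        #     )
        #     transport = UptimeCheckServiceGrpcAsyncIOTransport(channel=channel)
        #
        # When an explicit channel is passed, the credentials-related
        # arguments are ignored, as documented in ``__init__`` above.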
+ return self._grpc_channel + + @property + def list_uptime_check_configs(self) -> Callable[ + [uptime_service.ListUptimeCheckConfigsRequest], + Awaitable[uptime_service.ListUptimeCheckConfigsResponse]]: + r"""Return a callable for the list uptime check configs method over gRPC. + + Lists the existing valid Uptime check configurations + for the project (leaving out any invalid + configurations). + + Returns: + Callable[[~.ListUptimeCheckConfigsRequest], + Awaitable[~.ListUptimeCheckConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_uptime_check_configs' not in self._stubs: + self._stubs['list_uptime_check_configs'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/ListUptimeCheckConfigs', + request_serializer=uptime_service.ListUptimeCheckConfigsRequest.serialize, + response_deserializer=uptime_service.ListUptimeCheckConfigsResponse.deserialize, + ) + return self._stubs['list_uptime_check_configs'] + + @property + def get_uptime_check_config(self) -> Callable[ + [uptime_service.GetUptimeCheckConfigRequest], + Awaitable[uptime.UptimeCheckConfig]]: + r"""Return a callable for the get uptime check config method over gRPC. + + Gets a single Uptime check configuration. + + Returns: + Callable[[~.GetUptimeCheckConfigRequest], + Awaitable[~.UptimeCheckConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_uptime_check_config' not in self._stubs: + self._stubs['get_uptime_check_config'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/GetUptimeCheckConfig', + request_serializer=uptime_service.GetUptimeCheckConfigRequest.serialize, + response_deserializer=uptime.UptimeCheckConfig.deserialize, + ) + return self._stubs['get_uptime_check_config'] + + @property + def create_uptime_check_config(self) -> Callable[ + [uptime_service.CreateUptimeCheckConfigRequest], + Awaitable[uptime.UptimeCheckConfig]]: + r"""Return a callable for the create uptime check config method over gRPC. + + Creates a new Uptime check configuration. + + Returns: + Callable[[~.CreateUptimeCheckConfigRequest], + Awaitable[~.UptimeCheckConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_uptime_check_config' not in self._stubs: + self._stubs['create_uptime_check_config'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/CreateUptimeCheckConfig', + request_serializer=uptime_service.CreateUptimeCheckConfigRequest.serialize, + response_deserializer=uptime.UptimeCheckConfig.deserialize, + ) + return self._stubs['create_uptime_check_config'] + + @property + def update_uptime_check_config(self) -> Callable[ + [uptime_service.UpdateUptimeCheckConfigRequest], + Awaitable[uptime.UptimeCheckConfig]]: + r"""Return a callable for the update uptime check config method over gRPC. + + Updates an Uptime check configuration. 
You can either replace + the entire configuration with a new one or replace only certain + fields in the current configuration by specifying the fields to + be updated via ``updateMask``. Returns the updated + configuration. + + Returns: + Callable[[~.UpdateUptimeCheckConfigRequest], + Awaitable[~.UptimeCheckConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_uptime_check_config' not in self._stubs: + self._stubs['update_uptime_check_config'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/UpdateUptimeCheckConfig', + request_serializer=uptime_service.UpdateUptimeCheckConfigRequest.serialize, + response_deserializer=uptime.UptimeCheckConfig.deserialize, + ) + return self._stubs['update_uptime_check_config'] + + @property + def delete_uptime_check_config(self) -> Callable[ + [uptime_service.DeleteUptimeCheckConfigRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete uptime check config method over gRPC. + + Deletes an Uptime check configuration. Note that this + method will fail if the Uptime check configuration is + referenced by an alert policy or other dependent configs + that would be rendered invalid by the deletion. + + Returns: + Callable[[~.DeleteUptimeCheckConfigRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_uptime_check_config' not in self._stubs: + self._stubs['delete_uptime_check_config'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/DeleteUptimeCheckConfig', + request_serializer=uptime_service.DeleteUptimeCheckConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_uptime_check_config'] + + @property + def list_uptime_check_ips(self) -> Callable[ + [uptime_service.ListUptimeCheckIpsRequest], + Awaitable[uptime_service.ListUptimeCheckIpsResponse]]: + r"""Return a callable for the list uptime check ips method over gRPC. + + Returns the list of IP addresses that checkers run + from + + Returns: + Callable[[~.ListUptimeCheckIpsRequest], + Awaitable[~.ListUptimeCheckIpsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
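        # A minimal sketch, not part of the generated file: through the
        # generated UptimeCheckServiceAsyncClient this RPC is awaited, and the
        # resulting pager can be iterated asynchronously (assumes a running
        # event loop and default credentials):
        #
        #     client = monitoring_v3.UptimeCheckServiceAsyncClient()
        #     response = await client.list_uptime_check_ips(request={})
        #     async for ip in response:
        #         print(ip.ip_address)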
+ if 'list_uptime_check_ips' not in self._stubs: + self._stubs['list_uptime_check_ips'] = self.grpc_channel.unary_unary( + '/google.monitoring.v3.UptimeCheckService/ListUptimeCheckIps', + request_serializer=uptime_service.ListUptimeCheckIpsRequest.serialize, + response_deserializer=uptime_service.ListUptimeCheckIpsResponse.deserialize, + ) + return self._stubs['list_uptime_check_ips'] + + +__all__ = ( + 'UptimeCheckServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/__init__.py new file mode 100644 index 00000000..fe3a76e2 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/__init__.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .alert import ( + AlertPolicy, +) +from .alert_service import ( + CreateAlertPolicyRequest, + DeleteAlertPolicyRequest, + GetAlertPolicyRequest, + ListAlertPoliciesRequest, + ListAlertPoliciesResponse, + UpdateAlertPolicyRequest, +) +from .common import ( + Aggregation, + TimeInterval, + TypedValue, + ComparisonType, +) +from .dropped_labels import ( + DroppedLabels, +) +from .group import ( + Group, +) +from .group_service import ( + CreateGroupRequest, + DeleteGroupRequest, + GetGroupRequest, + ListGroupMembersRequest, + ListGroupMembersResponse, + ListGroupsRequest, + ListGroupsResponse, + UpdateGroupRequest, +) +from .metric import ( + LabelValue, + Point, + QueryError, + TextLocator, + TimeSeries, + TimeSeriesData, + TimeSeriesDescriptor, +) +from .metric_service import ( + CreateMetricDescriptorRequest, + CreateTimeSeriesError, + CreateTimeSeriesRequest, + CreateTimeSeriesSummary, + DeleteMetricDescriptorRequest, + GetMetricDescriptorRequest, + GetMonitoredResourceDescriptorRequest, + ListMetricDescriptorsRequest, + ListMetricDescriptorsResponse, + ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + ListTimeSeriesRequest, + ListTimeSeriesResponse, + QueryErrorList, + QueryTimeSeriesRequest, + QueryTimeSeriesResponse, +) +from .mutation_record import ( + MutationRecord, +) +from .notification import ( + NotificationChannel, + NotificationChannelDescriptor, +) +from .notification_service import ( + CreateNotificationChannelRequest, + DeleteNotificationChannelRequest, + GetNotificationChannelDescriptorRequest, + GetNotificationChannelRequest, + GetNotificationChannelVerificationCodeRequest, + GetNotificationChannelVerificationCodeResponse, + ListNotificationChannelDescriptorsRequest, + ListNotificationChannelDescriptorsResponse, + ListNotificationChannelsRequest, + ListNotificationChannelsResponse, + SendNotificationChannelVerificationCodeRequest, + UpdateNotificationChannelRequest, + VerifyNotificationChannelRequest, +) +from .service import ( + BasicSli, + DistributionCut, + Range, + RequestBasedSli, + Service, + ServiceLevelIndicator, + ServiceLevelObjective, + TimeSeriesRatio, + WindowsBasedSli, +) +from 
.service_service import ( + CreateServiceLevelObjectiveRequest, + CreateServiceRequest, + DeleteServiceLevelObjectiveRequest, + DeleteServiceRequest, + GetServiceLevelObjectiveRequest, + GetServiceRequest, + ListServiceLevelObjectivesRequest, + ListServiceLevelObjectivesResponse, + ListServicesRequest, + ListServicesResponse, + UpdateServiceLevelObjectiveRequest, + UpdateServiceRequest, +) +from .span_context import ( + SpanContext, +) +from .uptime import ( + InternalChecker, + UptimeCheckConfig, + UptimeCheckIp, + GroupResourceType, + UptimeCheckRegion, +) +from .uptime_service import ( + CreateUptimeCheckConfigRequest, + DeleteUptimeCheckConfigRequest, + GetUptimeCheckConfigRequest, + ListUptimeCheckConfigsRequest, + ListUptimeCheckConfigsResponse, + ListUptimeCheckIpsRequest, + ListUptimeCheckIpsResponse, + UpdateUptimeCheckConfigRequest, +) + +__all__ = ( + 'AlertPolicy', + 'CreateAlertPolicyRequest', + 'DeleteAlertPolicyRequest', + 'GetAlertPolicyRequest', + 'ListAlertPoliciesRequest', + 'ListAlertPoliciesResponse', + 'UpdateAlertPolicyRequest', + 'Aggregation', + 'TimeInterval', + 'TypedValue', + 'ComparisonType', + 'DroppedLabels', + 'Group', + 'CreateGroupRequest', + 'DeleteGroupRequest', + 'GetGroupRequest', + 'ListGroupMembersRequest', + 'ListGroupMembersResponse', + 'ListGroupsRequest', + 'ListGroupsResponse', + 'UpdateGroupRequest', + 'LabelValue', + 'Point', + 'QueryError', + 'TextLocator', + 'TimeSeries', + 'TimeSeriesData', + 'TimeSeriesDescriptor', + 'CreateMetricDescriptorRequest', + 'CreateTimeSeriesError', + 'CreateTimeSeriesRequest', + 'CreateTimeSeriesSummary', + 'DeleteMetricDescriptorRequest', + 'GetMetricDescriptorRequest', + 'GetMonitoredResourceDescriptorRequest', + 'ListMetricDescriptorsRequest', + 'ListMetricDescriptorsResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'ListTimeSeriesRequest', + 'ListTimeSeriesResponse', + 'QueryErrorList', + 'QueryTimeSeriesRequest', + 'QueryTimeSeriesResponse', + 'MutationRecord', + 'NotificationChannel', + 'NotificationChannelDescriptor', + 'CreateNotificationChannelRequest', + 'DeleteNotificationChannelRequest', + 'GetNotificationChannelDescriptorRequest', + 'GetNotificationChannelRequest', + 'GetNotificationChannelVerificationCodeRequest', + 'GetNotificationChannelVerificationCodeResponse', + 'ListNotificationChannelDescriptorsRequest', + 'ListNotificationChannelDescriptorsResponse', + 'ListNotificationChannelsRequest', + 'ListNotificationChannelsResponse', + 'SendNotificationChannelVerificationCodeRequest', + 'UpdateNotificationChannelRequest', + 'VerifyNotificationChannelRequest', + 'BasicSli', + 'DistributionCut', + 'Range', + 'RequestBasedSli', + 'Service', + 'ServiceLevelIndicator', + 'ServiceLevelObjective', + 'TimeSeriesRatio', + 'WindowsBasedSli', + 'CreateServiceLevelObjectiveRequest', + 'CreateServiceRequest', + 'DeleteServiceLevelObjectiveRequest', + 'DeleteServiceRequest', + 'GetServiceLevelObjectiveRequest', + 'GetServiceRequest', + 'ListServiceLevelObjectivesRequest', + 'ListServiceLevelObjectivesResponse', + 'ListServicesRequest', + 'ListServicesResponse', + 'UpdateServiceLevelObjectiveRequest', + 'UpdateServiceRequest', + 'SpanContext', + 'InternalChecker', + 'UptimeCheckConfig', + 'UptimeCheckIp', + 'GroupResourceType', + 'UptimeCheckRegion', + 'CreateUptimeCheckConfigRequest', + 'DeleteUptimeCheckConfigRequest', + 'GetUptimeCheckConfigRequest', + 'ListUptimeCheckConfigsRequest', + 'ListUptimeCheckConfigsResponse', + 'ListUptimeCheckIpsRequest', + 
'ListUptimeCheckIpsResponse', + 'UpdateUptimeCheckConfigRequest', +) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert.py new file mode 100644 index 00000000..1e3996e8 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert.py @@ -0,0 +1,654 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.monitoring_v3.types import common +from google.cloud.monitoring_v3.types import mutation_record as gm_mutation_record +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'AlertPolicy', + }, +) + + +class AlertPolicy(proto.Message): + r"""A description of the conditions under which some aspect of your + system is considered to be "unhealthy" and the ways to notify people + or services about this state. For an overview of alert policies, see + `Introduction to + Alerting `__. + + Attributes: + name (str): + Required if the policy exists. The resource name for this + policy. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] + + ``[ALERT_POLICY_ID]`` is assigned by Stackdriver Monitoring + when the policy is created. When calling the + [alertPolicies.create][google.monitoring.v3.AlertPolicyService.CreateAlertPolicy] + method, do not include the ``name`` field in the alerting + policy passed as part of the request. + display_name (str): + A short name or phrase used to identify the + policy in dashboards, notifications, and + incidents. To avoid confusion, don't use the + same display name for multiple policies in the + same project. The name is limited to 512 Unicode + characters. + documentation (google.cloud.monitoring_v3.types.AlertPolicy.Documentation): + Documentation that is included with + notifications and incidents related to this + policy. Best practice is for the documentation + to include information to help responders + understand, mitigate, escalate, and correct the + underlying problems detected by the alerting + policy. Notification channels that have limited + capacity might not show this documentation. + user_labels (Sequence[google.cloud.monitoring_v3.types.AlertPolicy.UserLabelsEntry]): + User-supplied key/value data to be used for organizing and + identifying the ``AlertPolicy`` objects. + + The field can contain up to 64 entries. Each key and value + is limited to 63 Unicode characters or 128 bytes, whichever + is smaller. Labels and values can contain only lowercase + letters, numerals, underscores, and dashes. Keys must begin + with a letter. + conditions (Sequence[google.cloud.monitoring_v3.types.AlertPolicy.Condition]): + A list of conditions for the policy. The conditions are + combined by AND or OR according to the ``combiner`` field. 
+ If the combined conditions evaluate to true, then an + incident is created. A policy can have from one to six + conditions. If ``condition_time_series_query_language`` is + present, it must be the only ``condition``. + combiner (google.cloud.monitoring_v3.types.AlertPolicy.ConditionCombinerType): + How to combine the results of multiple conditions to + determine if an incident should be opened. If + ``condition_time_series_query_language`` is present, this + must be ``COMBINE_UNSPECIFIED``. + enabled (google.protobuf.wrappers_pb2.BoolValue): + Whether or not the policy is enabled. On + write, the default interpretation if unset is + that the policy is enabled. On read, clients + should not make any assumption about the state + if it has not been populated. The field should + always be populated on List and Get operations, + unless a field projection has been specified + that strips it out. + validity (google.rpc.status_pb2.Status): + Read-only description of how the alert policy + is invalid. OK if the alert policy is valid. If + not OK, the alert policy will not generate + incidents. + notification_channels (Sequence[str]): + Identifies the notification channels to which notifications + should be sent when incidents are opened or closed or when + new violations occur on an already opened incident. Each + element of this array corresponds to the ``name`` field in + each of the + [``NotificationChannel``][google.monitoring.v3.NotificationChannel] + objects that are returned from the + [``ListNotificationChannels``] + [google.monitoring.v3.NotificationChannelService.ListNotificationChannels] + method. The format of the entries in this field is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] + creation_record (google.cloud.monitoring_v3.types.MutationRecord): + A read-only record of the creation of the + alerting policy. If provided in a call to create + or update, this field will be ignored. + mutation_record (google.cloud.monitoring_v3.types.MutationRecord): + A read-only record of the most recent change + to the alerting policy. If provided in a call to + create or update, this field will be ignored. + alert_strategy (google.cloud.monitoring_v3.types.AlertPolicy.AlertStrategy): + Control over how this alert policy's + notification channels are notified. + """ + class ConditionCombinerType(proto.Enum): + r"""Operators for combining conditions.""" + COMBINE_UNSPECIFIED = 0 + AND = 1 + OR = 2 + AND_WITH_MATCHING_RESOURCE = 3 + + class Documentation(proto.Message): + r"""A content string and a MIME type that describes the content + string's format. + + Attributes: + content (str): + The text of the documentation, interpreted according to + ``mime_type``. The content may not exceed 8,192 Unicode + characters and may not exceed more than 10,240 bytes when + encoded in UTF-8 format, whichever is smaller. + mime_type (str): + The format of the ``content`` field. Presently, only the + value ``"text/markdown"`` is supported. See + `Markdown `__ for + more information. + """ + + content = proto.Field( + proto.STRING, + number=1, + ) + mime_type = proto.Field( + proto.STRING, + number=2, + ) + + class Condition(proto.Message): + r"""A condition is a true/false test that determines when an + alerting policy should open an incident. If a condition + evaluates to true, it signifies that something is wrong. + + Attributes: + name (str): + Required if the condition exists. The unique resource name + for this condition. 
Its format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[POLICY_ID]/conditions/[CONDITION_ID] + + ``[CONDITION_ID]`` is assigned by Stackdriver Monitoring + when the condition is created as part of a new or updated + alerting policy. + + When calling the + [alertPolicies.create][google.monitoring.v3.AlertPolicyService.CreateAlertPolicy] + method, do not include the ``name`` field in the conditions + of the requested alerting policy. Stackdriver Monitoring + creates the condition identifiers and includes them in the + new policy. + + When calling the + [alertPolicies.update][google.monitoring.v3.AlertPolicyService.UpdateAlertPolicy] + method to update a policy, including a condition ``name`` + causes the existing condition to be updated. Conditions + without names are added to the updated policy. Existing + conditions are deleted if they are not updated. + + Best practice is to preserve ``[CONDITION_ID]`` if you make + only small changes, such as those to condition thresholds, + durations, or trigger values. Otherwise, treat the change as + a new condition and let the existing condition be deleted. + display_name (str): + A short name or phrase used to identify the + condition in dashboards, notifications, and + incidents. To avoid confusion, don't use the + same display name for multiple conditions in the + same policy. + condition_threshold (google.cloud.monitoring_v3.types.AlertPolicy.Condition.MetricThreshold): + A condition that compares a time series + against a threshold. + condition_absent (google.cloud.monitoring_v3.types.AlertPolicy.Condition.MetricAbsence): + A condition that checks that a time series + continues to receive new data points. + condition_matched_log (google.cloud.monitoring_v3.types.AlertPolicy.Condition.LogMatch): + A condition that checks for log messages + matching given constraints. If set, no other + conditions can be present. + condition_monitoring_query_language (google.cloud.monitoring_v3.types.AlertPolicy.Condition.MonitoringQueryLanguageCondition): + A condition that uses the Monitoring Query + Language to define alerts. + """ + + class Trigger(proto.Message): + r"""Specifies how many time series must fail a predicate to trigger a + condition. If not specified, then a ``{count: 1}`` trigger is used. + + Attributes: + count (int): + The absolute number of time series that must + fail the predicate for the condition to be + triggered. + percent (float): + The percentage of time series that must fail + the predicate for the condition to be triggered. + """ + + count = proto.Field( + proto.INT32, + number=1, + oneof='type', + ) + percent = proto.Field( + proto.DOUBLE, + number=2, + oneof='type', + ) + + class MetricThreshold(proto.Message): + r"""A condition type that compares a collection of time series + against a threshold. + + Attributes: + filter (str): + Required. A + `filter `__ + that identifies which time series should be compared with + the threshold. + + The filter is similar to the one that is specified in the + ```ListTimeSeries`` + request `__ + (that call is useful to verify the time series that will be + retrieved / processed). The filter must specify the metric + type and the resource type. Optionally, it can specify + resource labels and metric labels. This field must not + exceed 2048 Unicode characters in length. 
+            aggregations (Sequence[google.cloud.monitoring_v3.types.Aggregation]):
+                Specifies the alignment of data points in individual time
+                series as well as how to combine the retrieved time series
+                together (such as when aggregating multiple streams on each
+                resource to a single stream for each resource or when
+                aggregating streams across all members of a group of
+                resources). Multiple aggregations are applied in the order
+                specified.
+
+                This field is similar to the one in the ```ListTimeSeries``
+                request `__.
+                It is advisable to use the ``ListTimeSeries`` method when
+                debugging this field.
+            denominator_filter (str):
+                A
+                `filter `__
+                that identifies a time series that should be used as the
+                denominator of a ratio that will be compared with the
+                threshold. If a ``denominator_filter`` is specified, the
+                time series specified by the ``filter`` field will be used
+                as the numerator.
+
+                The filter must specify the metric type and optionally may
+                contain restrictions on resource type, resource labels, and
+                metric labels. This field may not exceed 2048 Unicode
+                characters in length.
+            denominator_aggregations (Sequence[google.cloud.monitoring_v3.types.Aggregation]):
+                Specifies the alignment of data points in individual time
+                series selected by ``denominatorFilter`` as well as how to
+                combine the retrieved time series together (such as when
+                aggregating multiple streams on each resource to a single
+                stream for each resource or when aggregating streams across
+                all members of a group of resources).
+
+                When computing ratios, the ``aggregations`` and
+                ``denominator_aggregations`` fields must use the same
+                alignment period and produce time series that have the same
+                periodicity and labels.
+            comparison (google.cloud.monitoring_v3.types.ComparisonType):
+                The comparison to apply between the time series (indicated
+                by ``filter`` and ``aggregation``) and the threshold
+                (indicated by ``threshold_value``). The comparison is
+                applied on each time series, with the time series on the
+                left-hand side and the threshold on the right-hand side.
+
+                Only ``COMPARISON_LT`` and ``COMPARISON_GT`` are supported
+                currently.
+            threshold_value (float):
+                A value against which to compare the time
+                series.
+            duration (google.protobuf.duration_pb2.Duration):
+                The amount of time that a time series must violate the
+                threshold to be considered failing. Currently, only values
+                that are a multiple of a minute--e.g., 0, 60, 120, or 300
+                seconds--are supported. If an invalid value is given, an
+                error will be returned. When choosing a duration, it is
+                useful to keep in mind the frequency of the underlying time
+                series data (which may also be affected by any alignments
+                specified in the ``aggregations`` field); a good duration is
+                long enough so that a single outlier does not generate
+                spurious alerts, but short enough that unhealthy states are
+                detected and alerted on quickly.
+            trigger (google.cloud.monitoring_v3.types.AlertPolicy.Condition.Trigger):
+                The number/percent of time series for which the comparison
+                must hold in order for the condition to trigger. If
+                unspecified, then the condition will trigger if the
+                comparison is true for any of the time series that have been
+                identified by ``filter`` and ``aggregations``, or by the
+                ratio, if ``denominator_filter`` and
+                ``denominator_aggregations`` are specified.
+ """ + + filter = proto.Field( + proto.STRING, + number=2, + ) + aggregations = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=common.Aggregation, + ) + denominator_filter = proto.Field( + proto.STRING, + number=9, + ) + denominator_aggregations = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=common.Aggregation, + ) + comparison = proto.Field( + proto.ENUM, + number=4, + enum=common.ComparisonType, + ) + threshold_value = proto.Field( + proto.DOUBLE, + number=5, + ) + duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + trigger = proto.Field( + proto.MESSAGE, + number=7, + message='AlertPolicy.Condition.Trigger', + ) + + class MetricAbsence(proto.Message): + r"""A condition type that checks that monitored resources are reporting + data. The configuration defines a metric and a set of monitored + resources. The predicate is considered in violation when a time + series for the specified metric of a monitored resource does not + include any data in the specified ``duration``. + + Attributes: + filter (str): + Required. A + `filter `__ + that identifies which time series should be compared with + the threshold. + + The filter is similar to the one that is specified in the + ```ListTimeSeries`` + request `__ + (that call is useful to verify the time series that will be + retrieved / processed). The filter must specify the metric + type and the resource type. Optionally, it can specify + resource labels and metric labels. This field must not + exceed 2048 Unicode characters in length. + aggregations (Sequence[google.cloud.monitoring_v3.types.Aggregation]): + Specifies the alignment of data points in individual time + series as well as how to combine the retrieved time series + together (such as when aggregating multiple streams on each + resource to a single stream for each resource or when + aggregating streams across all members of a group of + resrouces). Multiple aggregations are applied in the order + specified. + + This field is similar to the one in the ```ListTimeSeries`` + request `__. + It is advisable to use the ``ListTimeSeries`` method when + debugging this field. + duration (google.protobuf.duration_pb2.Duration): + The amount of time that a time series must fail to report + new data to be considered failing. The minimum value of this + field is 120 seconds. Larger values that are a multiple of a + minute--for example, 240 or 300 seconds--are supported. If + an invalid value is given, an error will be returned. The + ``Duration.nanos`` field is ignored. + trigger (google.cloud.monitoring_v3.types.AlertPolicy.Condition.Trigger): + The number/percent of time series for which the comparison + must hold in order for the condition to trigger. If + unspecified, then the condition will trigger if the + comparison is true for any of the time series that have been + identified by ``filter`` and ``aggregations``. + """ + + filter = proto.Field( + proto.STRING, + number=1, + ) + aggregations = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=common.Aggregation, + ) + duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + trigger = proto.Field( + proto.MESSAGE, + number=3, + message='AlertPolicy.Condition.Trigger', + ) + + class LogMatch(proto.Message): + r"""A condition type that checks whether a log message in the `scoping + project `__ + satisfies the given filter. Logs from other projects in the metrics + scope are not evaluated. + + Attributes: + filter (str): + Required. 
A logs-based filter. See `Advanced Logs + Queries `__ for how + this filter should be constructed. + label_extractors (Sequence[google.cloud.monitoring_v3.types.AlertPolicy.Condition.LogMatch.LabelExtractorsEntry]): + Optional. A map from a label key to an extractor expression, + which is used to extract the value for this label key. Each + entry in this map is a specification for how data should be + extracted from log entries that match ``filter``. Each + combination of extracted values is treated as a separate + rule for the purposes of triggering notifications. Label + keys and corresponding values can be used in notifications + generated by this condition. + + Please see `the documentation on logs-based metric + ``valueExtractor``\ s `__ + for syntax and examples. + """ + + filter = proto.Field( + proto.STRING, + number=1, + ) + label_extractors = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + class MonitoringQueryLanguageCondition(proto.Message): + r"""A condition type that allows alert policies to be defined using + `Monitoring Query + Language `__. + + Attributes: + query (str): + `Monitoring Query + Language `__ query + that outputs a boolean stream. + duration (google.protobuf.duration_pb2.Duration): + The amount of time that a time series must violate the + threshold to be considered failing. Currently, only values + that are a multiple of a minute--e.g., 0, 60, 120, or 300 + seconds--are supported. If an invalid value is given, an + error will be returned. When choosing a duration, it is + useful to keep in mind the frequency of the underlying time + series data (which may also be affected by any alignments + specified in the ``aggregations`` field); a good duration is + long enough so that a single outlier does not generate + spurious alerts, but short enough that unhealthy states are + detected and alerted on quickly. + trigger (google.cloud.monitoring_v3.types.AlertPolicy.Condition.Trigger): + The number/percent of time series for which the comparison + must hold in order for the condition to trigger. If + unspecified, then the condition will trigger if the + comparison is true for any of the time series that have been + identified by ``filter`` and ``aggregations``, or by the + ratio, if ``denominator_filter`` and + ``denominator_aggregations`` are specified. + """ + + query = proto.Field( + proto.STRING, + number=1, + ) + duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + trigger = proto.Field( + proto.MESSAGE, + number=3, + message='AlertPolicy.Condition.Trigger', + ) + + name = proto.Field( + proto.STRING, + number=12, + ) + display_name = proto.Field( + proto.STRING, + number=6, + ) + condition_threshold = proto.Field( + proto.MESSAGE, + number=1, + oneof='condition', + message='AlertPolicy.Condition.MetricThreshold', + ) + condition_absent = proto.Field( + proto.MESSAGE, + number=2, + oneof='condition', + message='AlertPolicy.Condition.MetricAbsence', + ) + condition_matched_log = proto.Field( + proto.MESSAGE, + number=20, + oneof='condition', + message='AlertPolicy.Condition.LogMatch', + ) + condition_monitoring_query_language = proto.Field( + proto.MESSAGE, + number=19, + oneof='condition', + message='AlertPolicy.Condition.MonitoringQueryLanguageCondition', + ) + + class AlertStrategy(proto.Message): + r"""Control over how the notification channels in + ``notification_channels`` are notified when this alert fires. 
+ + Attributes: + notification_rate_limit (google.cloud.monitoring_v3.types.AlertPolicy.AlertStrategy.NotificationRateLimit): + Required for alert policies with a ``LogMatch`` condition. + + This limit is not implemented for alert policies that are + not log-based. + """ + + class NotificationRateLimit(proto.Message): + r"""Control over the rate of notifications sent to this alert + policy's notification channels. + + Attributes: + period (google.protobuf.duration_pb2.Duration): + Not more than one notification per ``period``. + """ + + period = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + notification_rate_limit = proto.Field( + proto.MESSAGE, + number=1, + message='AlertPolicy.AlertStrategy.NotificationRateLimit', + ) + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + documentation = proto.Field( + proto.MESSAGE, + number=13, + message=Documentation, + ) + user_labels = proto.MapField( + proto.STRING, + proto.STRING, + number=16, + ) + conditions = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=Condition, + ) + combiner = proto.Field( + proto.ENUM, + number=6, + enum=ConditionCombinerType, + ) + enabled = proto.Field( + proto.MESSAGE, + number=17, + message=wrappers_pb2.BoolValue, + ) + validity = proto.Field( + proto.MESSAGE, + number=18, + message=status_pb2.Status, + ) + notification_channels = proto.RepeatedField( + proto.STRING, + number=14, + ) + creation_record = proto.Field( + proto.MESSAGE, + number=10, + message=gm_mutation_record.MutationRecord, + ) + mutation_record = proto.Field( + proto.MESSAGE, + number=11, + message=gm_mutation_record.MutationRecord, + ) + alert_strategy = proto.Field( + proto.MESSAGE, + number=21, + message=AlertStrategy, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert_service.py new file mode 100644 index 00000000..344e0a38 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert_service.py @@ -0,0 +1,257 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.monitoring_v3.types import alert +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'CreateAlertPolicyRequest', + 'GetAlertPolicyRequest', + 'ListAlertPoliciesRequest', + 'ListAlertPoliciesResponse', + 'UpdateAlertPolicyRequest', + 'DeleteAlertPolicyRequest', + }, +) + + +class CreateAlertPolicyRequest(proto.Message): + r"""The protocol for the ``CreateAlertPolicy`` request. + Attributes: + name (str): + Required. The + `project `__ + in which to create the alerting policy. 
The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + Note that this field names the parent container in which the + alerting policy will be written, not the name of the created + policy. \|name\| must be a host project of a workspace, + otherwise INVALID_ARGUMENT error will return. The alerting + policy that is returned will have a name that contains a + normalized representation of this name as a prefix but adds + a suffix of the form ``/alertPolicies/[ALERT_POLICY_ID]``, + identifying the policy in the container. + alert_policy (google.cloud.monitoring_v3.types.AlertPolicy): + Required. The requested alerting policy. You should omit the + ``name`` field in this policy. The name will be returned in + the new policy, including a new ``[ALERT_POLICY_ID]`` value. + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + alert_policy = proto.Field( + proto.MESSAGE, + number=2, + message=alert.AlertPolicy, + ) + + +class GetAlertPolicyRequest(proto.Message): + r"""The protocol for the ``GetAlertPolicy`` request. + Attributes: + name (str): + Required. The alerting policy to retrieve. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAlertPoliciesRequest(proto.Message): + r"""The protocol for the ``ListAlertPolicies`` request. + Attributes: + name (str): + Required. The + `project `__ + whose alert policies are to be listed. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + Note that this field names the parent container in which the + alerting policies to be listed are stored. To retrieve a + single alerting policy by name, use the + [GetAlertPolicy][google.monitoring.v3.AlertPolicyService.GetAlertPolicy] + operation, instead. + filter (str): + If provided, this field specifies the criteria that must be + met by alert policies to be included in the response. + + For more details, see `sorting and + filtering `__. + order_by (str): + A comma-separated list of fields by which to sort the + result. Supports the same set of field references as the + ``filter`` field. Entries can be prefixed with a minus sign + to sort by the field in descending order. + + For more details, see `sorting and + filtering `__. + page_size (int): + The maximum number of results to return in a + single response. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return more + results from the previous method call. + """ + + name = proto.Field( + proto.STRING, + number=4, + ) + filter = proto.Field( + proto.STRING, + number=5, + ) + order_by = proto.Field( + proto.STRING, + number=6, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAlertPoliciesResponse(proto.Message): + r"""The protocol for the ``ListAlertPolicies`` response. + Attributes: + alert_policies (Sequence[google.cloud.monitoring_v3.types.AlertPolicy]): + The returned alert policies. + next_page_token (str): + If there might be more results than were returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. + total_size (int): + The total number of alert policies in all + pages. This number is only an estimate, and may + change in subsequent pages. 
https://aip.dev/158 + """ + + @property + def raw_page(self): + return self + + alert_policies = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=alert.AlertPolicy, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + total_size = proto.Field( + proto.INT32, + number=4, + ) + + +class UpdateAlertPolicyRequest(proto.Message): + r"""The protocol for the ``UpdateAlertPolicy`` request. + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. A list of alerting policy field names. If this + field is not empty, each listed field in the existing + alerting policy is set to the value of the corresponding + field in the supplied policy (``alert_policy``), or to the + field's default value if the field is not in the supplied + alerting policy. Fields not listed retain their previous + value. + + Examples of valid field masks include ``display_name``, + ``documentation``, ``documentation.content``, + ``documentation.mime_type``, ``user_labels``, + ``user_label.nameofkey``, ``enabled``, ``conditions``, + ``combiner``, etc. + + If this field is empty, then the supplied alerting policy + replaces the existing policy. It is the same as deleting the + existing policy and adding the supplied policy, except for + the following: + + - The new policy will have the same ``[ALERT_POLICY_ID]`` + as the former policy. This gives you continuity with the + former policy in your notifications and incidents. + - Conditions in the new policy will keep their former + ``[CONDITION_ID]`` if the supplied condition includes the + ``name`` field with that ``[CONDITION_ID]``. If the + supplied condition omits the ``name`` field, then a new + ``[CONDITION_ID]`` is created. + alert_policy (google.cloud.monitoring_v3.types.AlertPolicy): + Required. The updated alerting policy or the updated values + for the fields listed in ``update_mask``. If ``update_mask`` + is not empty, any fields in this policy that are not in + ``update_mask`` are ignored. + """ + + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + alert_policy = proto.Field( + proto.MESSAGE, + number=3, + message=alert.AlertPolicy, + ) + + +class DeleteAlertPolicyRequest(proto.Message): + r"""The protocol for the ``DeleteAlertPolicy`` request. + Attributes: + name (str): + Required. The alerting policy to delete. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] + + For more information, see + [AlertPolicy][google.monitoring.v3.AlertPolicy]. + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/common.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/common.py new file mode 100644 index 00000000..6d5c2536 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/common.py @@ -0,0 +1,333 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'ComparisonType', + 'TypedValue', + 'TimeInterval', + 'Aggregation', + }, +) + + +class ComparisonType(proto.Enum): + r"""Specifies an ordering relationship on two arguments, called ``left`` + and ``right``. + """ + COMPARISON_UNSPECIFIED = 0 + COMPARISON_GT = 1 + COMPARISON_GE = 2 + COMPARISON_LT = 3 + COMPARISON_LE = 4 + COMPARISON_EQ = 5 + COMPARISON_NE = 6 + + +class TypedValue(proto.Message): + r"""A single strongly-typed value. + Attributes: + bool_value (bool): + A Boolean value: ``true`` or ``false``. + int64_value (int): + A 64-bit integer. Its range is approximately + ±9.2x1018. + double_value (float): + A 64-bit double-precision floating-point + number. Its magnitude is approximately + ±10±300 and it has 16 + significant digits of precision. + string_value (str): + A variable-length string value. + distribution_value (google.api.distribution_pb2.Distribution): + A distribution value. + """ + + bool_value = proto.Field( + proto.BOOL, + number=1, + oneof='value', + ) + int64_value = proto.Field( + proto.INT64, + number=2, + oneof='value', + ) + double_value = proto.Field( + proto.DOUBLE, + number=3, + oneof='value', + ) + string_value = proto.Field( + proto.STRING, + number=4, + oneof='value', + ) + distribution_value = proto.Field( + proto.MESSAGE, + number=5, + oneof='value', + message=distribution_pb2.Distribution, + ) + + +class TimeInterval(proto.Message): + r"""A closed time interval. It extends from the start time to the end + time, and includes both: ``[startTime, endTime]``. Valid time + intervals depend on the + ```MetricKind`` `__ + of the metric value. The end time must not be earlier than the start + time. When writing data points, the start time must not be more than + 25 hours in the past and the end time must not be more than five + minutes in the future. + + - For ``GAUGE`` metrics, the ``startTime`` value is technically + optional; if no value is specified, the start time defaults to + the value of the end time, and the interval represents a single + point in time. If both start and end times are specified, they + must be identical. Such an interval is valid only for ``GAUGE`` + metrics, which are point-in-time measurements. The end time of a + new interval must be at least a millisecond after the end time of + the previous interval. + + - For ``DELTA`` metrics, the start time and end time must specify a + non-zero interval, with subsequent points specifying contiguous + and non-overlapping intervals. For ``DELTA`` metrics, the start + time of the next interval must be at least a millisecond after + the end time of the previous interval. + + - For ``CUMULATIVE`` metrics, the start time and end time must + specify a a non-zero interval, with subsequent points specifying + the same start time and increasing end times, until an event + resets the cumulative value to zero and sets a new start time for + the following points. The new start time must be at least a + millisecond after the end time of the previous interval. + + - The start time of a new interval must be at least a millisecond + after the end time of the previous interval because intervals are + closed. 
If the start time of a new interval is the same as the + end time of the previous interval, then data written at the new + start time could overwrite data written at the previous end time. + + Attributes: + end_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The end of the time interval. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The beginning of the time interval. + The default value for the start time is the end + time. The start time must not be later than the + end time. + """ + + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + start_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + + +class Aggregation(proto.Message): + r"""Describes how to combine multiple time series to provide a different + view of the data. Aggregation of time series is done in two steps. + First, each time series in the set is *aligned* to the same time + interval boundaries, then the set of time series is optionally + *reduced* in number. + + Alignment consists of applying the ``per_series_aligner`` operation + to each time series after its data has been divided into regular + ``alignment_period`` time intervals. This process takes *all* of the + data points in an alignment period, applies a mathematical + transformation such as averaging, minimum, maximum, delta, etc., and + converts them into a single data point per period. + + Reduction is when the aligned and transformed time series can + optionally be combined, reducing the number of time series through + similar mathematical transformations. Reduction involves applying a + ``cross_series_reducer`` to all the time series, optionally sorting + the time series into subsets with ``group_by_fields``, and applying + the reducer to each subset. + + The raw time series data can contain a huge amount of information + from multiple sources. Alignment and reduction transforms this mass + of data into a more manageable and representative collection of + data, for example "the 95% latency across the average of all tasks + in a cluster". This representative data can be more easily graphed + and comprehended, and the individual time series data is still + available for later drilldown. For more details, see `Filtering and + aggregation `__. + + Attributes: + alignment_period (google.protobuf.duration_pb2.Duration): + The ``alignment_period`` specifies a time interval, in + seconds, that is used to divide the data in all the [time + series][google.monitoring.v3.TimeSeries] into consistent + blocks of time. This will be done before the per-series + aligner can be applied to the data. + + The value must be at least 60 seconds. If a per-series + aligner other than ``ALIGN_NONE`` is specified, this field + is required or an error is returned. If no per-series + aligner is specified, or the aligner ``ALIGN_NONE`` is + specified, then this field is ignored. + + The maximum value of the ``alignment_period`` is 104 weeks + (2 years) for charts, and 90,000 seconds (25 hours) for + alerting policies. + per_series_aligner (google.cloud.monitoring_v3.types.Aggregation.Aligner): + An ``Aligner`` describes how to bring the data points in a + single time series into temporal alignment. Except for + ``ALIGN_NONE``, all alignments cause all the data points in + an ``alignment_period`` to be mathematically grouped + together, resulting in a single data point for each + ``alignment_period`` with end timestamp at the end of the + period. 
+ + Not all alignment operations may be applied to all time + series. The valid choices depend on the ``metric_kind`` and + ``value_type`` of the original time series. Alignment can + change the ``metric_kind`` or the ``value_type`` of the time + series. + + Time series data must be aligned in order to perform + cross-time series reduction. If ``cross_series_reducer`` is + specified, then ``per_series_aligner`` must be specified and + not equal to ``ALIGN_NONE`` and ``alignment_period`` must be + specified; otherwise, an error is returned. + cross_series_reducer (google.cloud.monitoring_v3.types.Aggregation.Reducer): + The reduction operation to be used to combine time series + into a single time series, where the value of each data + point in the resulting series is a function of all the + already aligned values in the input time series. + + Not all reducer operations can be applied to all time + series. The valid choices depend on the ``metric_kind`` and + the ``value_type`` of the original time series. Reduction + can yield a time series with a different ``metric_kind`` or + ``value_type`` than the input time series. + + Time series data must first be aligned (see + ``per_series_aligner``) in order to perform cross-time + series reduction. If ``cross_series_reducer`` is specified, + then ``per_series_aligner`` must be specified, and must not + be ``ALIGN_NONE``. An ``alignment_period`` must also be + specified; otherwise, an error is returned. + group_by_fields (Sequence[str]): + The set of fields to preserve when ``cross_series_reducer`` + is specified. The ``group_by_fields`` determine how the time + series are partitioned into subsets prior to applying the + aggregation operation. Each subset contains time series that + have the same value for each of the grouping fields. Each + individual time series is a member of exactly one subset. + The ``cross_series_reducer`` is applied to each subset of + time series. It is not possible to reduce across different + resource types, so this field implicitly contains + ``resource.type``. Fields not specified in + ``group_by_fields`` are aggregated away. If + ``group_by_fields`` is not specified and all the time series + have the same resource type, then the time series are + aggregated into a single output time series. If + ``cross_series_reducer`` is not defined, this field is + ignored. + """ + class Aligner(proto.Enum): + r"""The ``Aligner`` specifies the operation that will be applied to the + data points in each alignment period in a time series. Except for + ``ALIGN_NONE``, which specifies that no operation be applied, each + alignment operation replaces the set of data values in each + alignment period with a single value: the result of applying the + operation to the data values. An aligned time series has a single + data value at the end of each ``alignment_period``. + + An alignment operation can change the data type of the values, too. + For example, if you apply a counting operation to boolean values, + the data ``value_type`` in the original time series is ``BOOLEAN``, + but the ``value_type`` in the aligned result is ``INT64``. 
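+
+        For example, the following illustrative sketch (the period, fields,
+        and ``group_by_fields`` value are assumptions, not requirements of
+        this enum) pairs an aligner with a reducer when building an
+        ``Aggregation``::
+
+            from google.cloud import monitoring_v3
+            from google.protobuf import duration_pb2
+
+            # Field values below are illustrative assumptions.
+            aggregation = monitoring_v3.Aggregation(
+                alignment_period=duration_pb2.Duration(seconds=300),
+                per_series_aligner=monitoring_v3.Aggregation.Aligner.ALIGN_RATE,
+                cross_series_reducer=monitoring_v3.Aggregation.Reducer.REDUCE_MEAN,
+                group_by_fields=["resource.zone"],
+            )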
+ """ + ALIGN_NONE = 0 + ALIGN_DELTA = 1 + ALIGN_RATE = 2 + ALIGN_INTERPOLATE = 3 + ALIGN_NEXT_OLDER = 4 + ALIGN_MIN = 10 + ALIGN_MAX = 11 + ALIGN_MEAN = 12 + ALIGN_COUNT = 13 + ALIGN_SUM = 14 + ALIGN_STDDEV = 15 + ALIGN_COUNT_TRUE = 16 + ALIGN_COUNT_FALSE = 24 + ALIGN_FRACTION_TRUE = 17 + ALIGN_PERCENTILE_99 = 18 + ALIGN_PERCENTILE_95 = 19 + ALIGN_PERCENTILE_50 = 20 + ALIGN_PERCENTILE_05 = 21 + ALIGN_PERCENT_CHANGE = 23 + + class Reducer(proto.Enum): + r"""A Reducer operation describes how to aggregate data points + from multiple time series into a single time series, where the + value of each data point in the resulting series is a function + of all the already aligned values in the input time series. + """ + REDUCE_NONE = 0 + REDUCE_MEAN = 1 + REDUCE_MIN = 2 + REDUCE_MAX = 3 + REDUCE_SUM = 4 + REDUCE_STDDEV = 5 + REDUCE_COUNT = 6 + REDUCE_COUNT_TRUE = 7 + REDUCE_COUNT_FALSE = 15 + REDUCE_FRACTION_TRUE = 8 + REDUCE_PERCENTILE_99 = 9 + REDUCE_PERCENTILE_95 = 10 + REDUCE_PERCENTILE_50 = 11 + REDUCE_PERCENTILE_05 = 12 + + alignment_period = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + per_series_aligner = proto.Field( + proto.ENUM, + number=2, + enum=Aligner, + ) + cross_series_reducer = proto.Field( + proto.ENUM, + number=4, + enum=Reducer, + ) + group_by_fields = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/dropped_labels.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/dropped_labels.py new file mode 100644 index 00000000..10bf595e --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/dropped_labels.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'DroppedLabels', + }, +) + + +class DroppedLabels(proto.Message): + r"""A set of (label, value) pairs that were removed from a + Distribution time series during aggregation and then added as an + attachment to a Distribution.Exemplar. + + The full label set for the exemplars is constructed by using the + dropped pairs in combination with the label values that remain + on the aggregated Distribution time series. The constructed full + label set can be used to identify the specific entity, such as + the instance or job, which might be contributing to a long-tail. + However, with dropped labels, the storage requirements are + reduced because only the aggregated distribution values for a + large group of time series are stored. + + Note that there are no guarantees on ordering of the labels from + exemplar-to-exemplar and from distribution-to-distribution in + the same stream, and there may be duplicates. It is up to + clients to resolve any ambiguities. 
+ + Attributes: + label (Sequence[google.cloud.monitoring_v3.types.DroppedLabels.LabelEntry]): + Map from label to its value, for all labels + dropped in any aggregation. + """ + + label = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group.py new file mode 100644 index 00000000..7032d8c0 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'Group', + }, +) + + +class Group(proto.Message): + r"""The description of a dynamic collection of monitored resources. Each + group has a filter that is matched against monitored resources and + their associated metadata. If a group's filter matches an available + monitored resource, then that resource is a member of that group. + Groups can contain any number of monitored resources, and each + monitored resource can be a member of any number of groups. + + Groups can be nested in parent-child hierarchies. The ``parentName`` + field identifies an optional parent for each group. If a group has a + parent, then the only monitored resources available to be matched by + the group's filter are the resources contained in the parent group. + In other words, a group contains the monitored resources that match + its filter and the filters of all the group's ancestors. A group + without a parent can contain any monitored resource. + + For example, consider an infrastructure running a set of instances + with two user-defined tags: ``"environment"`` and ``"role"``. A + parent group has a filter, ``environment="production"``. A child of + that parent group has a filter, ``role="transcoder"``. The parent + group contains all instances in the production environment, + regardless of their roles. The child group contains instances that + have the transcoder role *and* are in the production environment. + + The monitored resources contained in a group can change at any + moment, depending on what resources exist and what filters are + associated with the group and its ancestors. + + Attributes: + name (str): + Output only. The name of this group. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + When creating a group, this field is ignored and a new name + is created consisting of the project specified in the call + to ``CreateGroup`` and a unique ``[GROUP_ID]`` that is + generated automatically. + display_name (str): + A user-assigned name for this group, used + only for display purposes. + parent_name (str): + The name of the group's parent, if it has one. 
The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + For groups with no parent, ``parent_name`` is the empty + string, ``""``. + filter (str): + The filter used to determine which monitored + resources belong to this group. + is_cluster (bool): + If true, the members of this group are + considered to be a cluster. The system can + perform additional analysis on groups that are + clusters. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + parent_name = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=5, + ) + is_cluster = proto.Field( + proto.BOOL, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group_service.py new file mode 100644 index 00000000..0e57590a --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group_service.py @@ -0,0 +1,346 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.types import common +from google.cloud.monitoring_v3.types import group as gm_group + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'ListGroupsRequest', + 'ListGroupsResponse', + 'GetGroupRequest', + 'CreateGroupRequest', + 'UpdateGroupRequest', + 'DeleteGroupRequest', + 'ListGroupMembersRequest', + 'ListGroupMembersResponse', + }, +) + + +class ListGroupsRequest(proto.Message): + r"""The ``ListGroup`` request. + Attributes: + name (str): + Required. The + `project `__ + whose groups are to be listed. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + children_of_group (str): + A group name. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + Returns groups whose ``parent_name`` field contains the + group name. If no groups have this parent, the results are + empty. + ancestors_of_group (str): + A group name. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + Returns groups that are ancestors of the specified group. + The groups are returned in order, starting with the + immediate parent and ending with the most distant ancestor. + If the specified group has no immediate parent, the results + are empty. + descendants_of_group (str): + A group name. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + + Returns the descendants of the specified group. This is a + superset of the results returned by the + ``children_of_group`` filter, and includes + children-of-children, and so forth. + page_size (int): + A positive number that is the maximum number + of results to return. 
+ page_token (str): + If this field is not empty then it must contain the + ``next_page_token`` value returned by a previous call to + this method. Using this field causes the method to return + additional results from the previous method call. + """ + + name = proto.Field( + proto.STRING, + number=7, + ) + children_of_group = proto.Field( + proto.STRING, + number=2, + oneof='filter', + ) + ancestors_of_group = proto.Field( + proto.STRING, + number=3, + oneof='filter', + ) + descendants_of_group = proto.Field( + proto.STRING, + number=4, + oneof='filter', + ) + page_size = proto.Field( + proto.INT32, + number=5, + ) + page_token = proto.Field( + proto.STRING, + number=6, + ) + + +class ListGroupsResponse(proto.Message): + r"""The ``ListGroups`` response. + Attributes: + group (Sequence[google.cloud.monitoring_v3.types.Group]): + The groups that match the specified filters. + next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. + """ + + @property + def raw_page(self): + return self + + group = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gm_group.Group, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetGroupRequest(proto.Message): + r"""The ``GetGroup`` request. + Attributes: + name (str): + Required. The group to retrieve. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateGroupRequest(proto.Message): + r"""The ``CreateGroup`` request. + Attributes: + name (str): + Required. The + `project `__ + in which to create the group. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + group (google.cloud.monitoring_v3.types.Group): + Required. A group definition. It is an error to define the + ``name`` field because the system assigns the name. + validate_only (bool): + If true, validate this request but do not + create the group. + """ + + name = proto.Field( + proto.STRING, + number=4, + ) + group = proto.Field( + proto.MESSAGE, + number=2, + message=gm_group.Group, + ) + validate_only = proto.Field( + proto.BOOL, + number=3, + ) + + +class UpdateGroupRequest(proto.Message): + r"""The ``UpdateGroup`` request. + Attributes: + group (google.cloud.monitoring_v3.types.Group): + Required. The new definition of the group. All fields of the + existing group, excepting ``name``, are replaced with the + corresponding fields of this group. + validate_only (bool): + If true, validate this request but do not + update the existing group. + """ + + group = proto.Field( + proto.MESSAGE, + number=2, + message=gm_group.Group, + ) + validate_only = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteGroupRequest(proto.Message): + r"""The ``DeleteGroup`` request. The default behavior is to be able to + delete a single group without any descendants. + + Attributes: + name (str): + Required. The group to delete. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + recursive (bool): + If this field is true, then the request means + to delete a group with all its descendants. + Otherwise, the request means to delete a group + only when it has no descendants. The default + value is false. 
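+
+    Example (an illustrative sketch; the project and group ID below are
+    assumptions, not values defined by this request type)::
+
+        from google.cloud import monitoring_v3
+
+        client = monitoring_v3.GroupServiceClient()
+        client.delete_group(
+            monitoring_v3.DeleteGroupRequest(
+                # Hypothetical group name used only for illustration.
+                name="projects/my-project/groups/my-group-id",
+                recursive=True,  # also delete any descendant groups
+            )
+        )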
+ """ + + name = proto.Field( + proto.STRING, + number=3, + ) + recursive = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListGroupMembersRequest(proto.Message): + r"""The ``ListGroupMembers`` request. + Attributes: + name (str): + Required. The group whose members are listed. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] + page_size (int): + A positive number that is the maximum number + of results to return. + page_token (str): + If this field is not empty then it must contain the + ``next_page_token`` value returned by a previous call to + this method. Using this field causes the method to return + additional results from the previous method call. + filter (str): + An optional `list + filter `__ + describing the members to be returned. The filter may + reference the type, labels, and metadata of monitored + resources that comprise the group. For example, to return + only resources representing Compute Engine VM instances, use + this filter: + + :: + + `resource.type = "gce_instance"` + interval (google.cloud.monitoring_v3.types.TimeInterval): + An optional time interval for which results + should be returned. Only members that were part + of the group during the specified interval are + included in the response. If no interval is + provided then the group membership over the last + minute is returned. + """ + + name = proto.Field( + proto.STRING, + number=7, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + filter = proto.Field( + proto.STRING, + number=5, + ) + interval = proto.Field( + proto.MESSAGE, + number=6, + message=common.TimeInterval, + ) + + +class ListGroupMembersResponse(proto.Message): + r"""The ``ListGroupMembers`` response. + Attributes: + members (Sequence[google.api.monitored_resource_pb2.MonitoredResource]): + A set of monitored resources in the group. + next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. + total_size (int): + The total number of elements matching this + request. + """ + + @property + def raw_page(self): + return self + + members = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=monitored_resource_pb2.MonitoredResource, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + total_size = proto.Field( + proto.INT32, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric.py new file mode 100644 index 00000000..28baa973 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric.py @@ -0,0 +1,417 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.api import label_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.types import common + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'Point', + 'TimeSeries', + 'TimeSeriesDescriptor', + 'TimeSeriesData', + 'LabelValue', + 'QueryError', + 'TextLocator', + }, +) + + +class Point(proto.Message): + r"""A single data point in a time series. + Attributes: + interval (google.cloud.monitoring_v3.types.TimeInterval): + The time interval to which the data point applies. For + ``GAUGE`` metrics, the start time is optional, but if it is + supplied, it must equal the end time. For ``DELTA`` metrics, + the start and end time should specify a non-zero interval, + with subsequent points specifying contiguous and + non-overlapping intervals. For ``CUMULATIVE`` metrics, the + start and end time should specify a non-zero interval, with + subsequent points specifying the same start time and + increasing end times, until an event resets the cumulative + value to zero and sets a new start time for the following + points. + value (google.cloud.monitoring_v3.types.TypedValue): + The value of the data point. + """ + + interval = proto.Field( + proto.MESSAGE, + number=1, + message=common.TimeInterval, + ) + value = proto.Field( + proto.MESSAGE, + number=2, + message=common.TypedValue, + ) + + +class TimeSeries(proto.Message): + r"""A collection of data points that describes the time-varying + values of a metric. A time series is identified by a combination + of a fully-specified monitored resource and a fully-specified + metric. This type is used for both listing and creating time + series. + + Attributes: + metric (google.api.metric_pb2.Metric): + The associated metric. A fully-specified + metric used to identify the time series. + resource (google.api.monitored_resource_pb2.MonitoredResource): + The associated monitored resource. Custom metrics can use + only certain monitored resource types in their time series + data. For more information, see `Monitored resources for + custom + metrics `__. + metadata (google.api.monitored_resource_pb2.MonitoredResourceMetadata): + Output only. The associated monitored + resource metadata. When reading a time series, + this field will include metadata labels that are + explicitly named in the reduction. When creating + a time series, this field is ignored. + metric_kind (google.api.metric_pb2.MetricKind): + The metric kind of the time series. When listing time + series, this metric kind might be different from the metric + kind of the associated metric if this time series is an + alignment or reduction of other time series. + + When creating a time series, this field is optional. If + present, it must be the same as the metric kind of the + associated metric. If the associated metric's descriptor + must be auto-created, then this field specifies the metric + kind of the new descriptor and must be either ``GAUGE`` (the + default) or ``CUMULATIVE``. + value_type (google.api.metric_pb2.ValueType): + The value type of the time series. When listing time series, + this value type might be different from the value type of + the associated metric if this time series is an alignment or + reduction of other time series. + + When creating a time series, this field is optional. If + present, it must be the same as the type of the data in the + ``points`` field. 
+ points (Sequence[google.cloud.monitoring_v3.types.Point]): + The data points of this time series. When listing time + series, points are returned in reverse time order. + + When creating a time series, this field must contain exactly + one point and the point's type must be the same as the value + type of the associated metric. If the associated metric's + descriptor must be auto-created, then the value type of the + descriptor is determined by the point's type, which must be + ``BOOL``, ``INT64``, ``DOUBLE``, or ``DISTRIBUTION``. + unit (str): + The units in which the metric value is reported. It is only + applicable if the ``value_type`` is ``INT64``, ``DOUBLE``, + or ``DISTRIBUTION``. The ``unit`` defines the representation + of the stored metric values. + """ + + metric = proto.Field( + proto.MESSAGE, + number=1, + message=metric_pb2.Metric, + ) + resource = proto.Field( + proto.MESSAGE, + number=2, + message=monitored_resource_pb2.MonitoredResource, + ) + metadata = proto.Field( + proto.MESSAGE, + number=7, + message=monitored_resource_pb2.MonitoredResourceMetadata, + ) + metric_kind = proto.Field( + proto.ENUM, + number=3, + enum=metric_pb2.MetricDescriptor.MetricKind, + ) + value_type = proto.Field( + proto.ENUM, + number=4, + enum=metric_pb2.MetricDescriptor.ValueType, + ) + points = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='Point', + ) + unit = proto.Field( + proto.STRING, + number=8, + ) + + +class TimeSeriesDescriptor(proto.Message): + r"""A descriptor for the labels and points in a time series. + Attributes: + label_descriptors (Sequence[google.api.label_pb2.LabelDescriptor]): + Descriptors for the labels. + point_descriptors (Sequence[google.cloud.monitoring_v3.types.TimeSeriesDescriptor.ValueDescriptor]): + Descriptors for the point data value columns. + """ + + class ValueDescriptor(proto.Message): + r"""A descriptor for the value columns in a data point. + Attributes: + key (str): + The value key. + value_type (google.api.metric_pb2.ValueType): + The value type. + metric_kind (google.api.metric_pb2.MetricKind): + The value stream kind. + unit (str): + The unit in which ``time_series`` point values are reported. + ``unit`` follows the UCUM format for units as seen in + https://unitsofmeasure.org/ucum.html. ``unit`` is only valid + if ``value_type`` is INTEGER, DOUBLE, DISTRIBUTION. + """ + + key = proto.Field( + proto.STRING, + number=1, + ) + value_type = proto.Field( + proto.ENUM, + number=2, + enum=metric_pb2.MetricDescriptor.ValueType, + ) + metric_kind = proto.Field( + proto.ENUM, + number=3, + enum=metric_pb2.MetricDescriptor.MetricKind, + ) + unit = proto.Field( + proto.STRING, + number=4, + ) + + label_descriptors = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=label_pb2.LabelDescriptor, + ) + point_descriptors = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=ValueDescriptor, + ) + + +class TimeSeriesData(proto.Message): + r"""Represents the values of a time series associated with a + TimeSeriesDescriptor. + + Attributes: + label_values (Sequence[google.cloud.monitoring_v3.types.LabelValue]): + The values of the labels in the time series identifier, + given in the same order as the ``label_descriptors`` field + of the TimeSeriesDescriptor associated with this object. + Each value must have a value of the type given in the + corresponding entry of ``label_descriptors``. + point_data (Sequence[google.cloud.monitoring_v3.types.TimeSeriesData.PointData]): + The points in the time series. 
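+
+    Example (an illustrative sketch; ``response`` is assumed to be a
+    ``QueryTimeSeriesResponse`` obtained elsewhere and is not defined
+    here)::
+
+        # "response" is an assumed QueryTimeSeriesResponse.
+        for series in response.time_series_data:
+            for point in series.point_data:
+                print(series.label_values,
+                      point.time_interval.end_time,
+                      point.values)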
+ """ + + class PointData(proto.Message): + r"""A point's value columns and time interval. Each point has one or + more point values corresponding to the entries in + ``point_descriptors`` field in the TimeSeriesDescriptor associated + with this object. + + Attributes: + values (Sequence[google.cloud.monitoring_v3.types.TypedValue]): + The values that make up the point. + time_interval (google.cloud.monitoring_v3.types.TimeInterval): + The time interval associated with the point. + """ + + values = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common.TypedValue, + ) + time_interval = proto.Field( + proto.MESSAGE, + number=2, + message=common.TimeInterval, + ) + + label_values = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LabelValue', + ) + point_data = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=PointData, + ) + + +class LabelValue(proto.Message): + r"""A label value. + Attributes: + bool_value (bool): + A bool label value. + int64_value (int): + An int64 label value. + string_value (str): + A string label value. + """ + + bool_value = proto.Field( + proto.BOOL, + number=1, + oneof='value', + ) + int64_value = proto.Field( + proto.INT64, + number=2, + oneof='value', + ) + string_value = proto.Field( + proto.STRING, + number=3, + oneof='value', + ) + + +class QueryError(proto.Message): + r"""An error associated with a query in the time series query + language format. + + Attributes: + locator (google.cloud.monitoring_v3.types.TextLocator): + The location of the time series query + language text that this error applies to. + message (str): + The error message. + """ + + locator = proto.Field( + proto.MESSAGE, + number=1, + message='TextLocator', + ) + message = proto.Field( + proto.STRING, + number=2, + ) + + +class TextLocator(proto.Message): + r"""A locator for text. Indicates a particular part of the text of a + request or of an object referenced in the request. + + For example, suppose the request field ``text`` contains: + + text: "The quick brown fox jumps over the lazy dog." + + Then the locator: + + source: "text" start_position { line: 1 column: 17 } end_position { + line: 1 column: 19 } + + refers to the part of the text: "fox". + + Attributes: + source (str): + The source of the text. The source may be a field in the + request, in which case its format is the format of the + google.rpc.BadRequest.FieldViolation.field field in + https://cloud.google.com/apis/design/errors#error_details. + It may also be be a source other than the request field + (e.g. a macro definition referenced in the text of the + query), in which case this is the name of the source (e.g. + the macro name). + start_position (google.cloud.monitoring_v3.types.TextLocator.Position): + The position of the first byte within the + text. + end_position (google.cloud.monitoring_v3.types.TextLocator.Position): + The position of the last byte within the + text. + nested_locator (google.cloud.monitoring_v3.types.TextLocator): + If ``source``, ``start_position``, and ``end_position`` + describe a call on some object (e.g. a macro in the time + series query language text) and a location is to be + designated in that object's text, ``nested_locator`` + identifies the location within that object. + nesting_reason (str): + When ``nested_locator`` is set, this field gives the reason + for the nesting. Usually, the reason is a macro invocation. 
+ In that case, the macro name (including the leading '@') + signals the location of the macro call in the text and a + macro argument name (including the leading '$') signals the + location of the macro argument inside the macro body that + got substituted away. + """ + + class Position(proto.Message): + r"""The position of a byte within the text. + Attributes: + line (int): + The line, starting with 1, where the byte is + positioned. + column (int): + The column within the line, starting with 1, + where the byte is positioned. This is a byte + index even though the text is UTF-8. + """ + + line = proto.Field( + proto.INT32, + number=1, + ) + column = proto.Field( + proto.INT32, + number=2, + ) + + source = proto.Field( + proto.STRING, + number=1, + ) + start_position = proto.Field( + proto.MESSAGE, + number=2, + message=Position, + ) + end_position = proto.Field( + proto.MESSAGE, + number=3, + message=Position, + ) + nested_locator = proto.Field( + proto.MESSAGE, + number=4, + message='TextLocator', + ) + nesting_reason = proto.Field( + proto.STRING, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric_service.py new file mode 100644 index 00000000..83db5344 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric_service.py @@ -0,0 +1,665 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.monitoring_v3.types import common +from google.cloud.monitoring_v3.types import metric as gm_metric +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'GetMonitoredResourceDescriptorRequest', + 'ListMetricDescriptorsRequest', + 'ListMetricDescriptorsResponse', + 'GetMetricDescriptorRequest', + 'CreateMetricDescriptorRequest', + 'DeleteMetricDescriptorRequest', + 'ListTimeSeriesRequest', + 'ListTimeSeriesResponse', + 'CreateTimeSeriesRequest', + 'CreateTimeSeriesError', + 'CreateTimeSeriesSummary', + 'QueryTimeSeriesRequest', + 'QueryTimeSeriesResponse', + 'QueryErrorList', + }, +) + + +class ListMonitoredResourceDescriptorsRequest(proto.Message): + r"""The ``ListMonitoredResourceDescriptors`` request. + Attributes: + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + filter (str): + An optional + `filter `__ + describing the descriptors to be returned. The filter can + reference the descriptor's type and labels. 
For example, the + following filter returns only Google Compute Engine + descriptors that have an ``id`` label: + + :: + + resource.type = starts_with("gce_") AND resource.label:id + page_size (int): + A positive number that is the maximum number + of results to return. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return + additional results from the previous method call. + """ + + name = proto.Field( + proto.STRING, + number=5, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListMonitoredResourceDescriptorsResponse(proto.Message): + r"""The ``ListMonitoredResourceDescriptors`` response. + Attributes: + resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): + The monitored resource descriptors that are available to + this project and that match ``filter``, if present. + next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. + """ + + @property + def raw_page(self): + return self + + resource_descriptors = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=monitored_resource_pb2.MonitoredResourceDescriptor, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetMonitoredResourceDescriptorRequest(proto.Message): + r"""The ``GetMonitoredResourceDescriptor`` request. + Attributes: + name (str): + Required. The monitored resource descriptor to get. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/monitoredResourceDescriptors/[RESOURCE_TYPE] + + The ``[RESOURCE_TYPE]`` is a predefined type, such as + ``cloudsql_database``. + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + + +class ListMetricDescriptorsRequest(proto.Message): + r"""The ``ListMetricDescriptors`` request. + Attributes: + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + filter (str): + If this field is empty, all custom and system-defined metric + descriptors are returned. Otherwise, the + `filter `__ + specifies which metric descriptors are to be returned. For + example, the following filter matches all `custom + metrics `__: + + :: + + metric.type = starts_with("custom.googleapis.com/") + page_size (int): + A positive number that is the maximum number + of results to return. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return + additional results from the previous method call. + """ + + name = proto.Field( + proto.STRING, + number=5, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListMetricDescriptorsResponse(proto.Message): + r"""The ``ListMetricDescriptors`` response. + Attributes: + metric_descriptors (Sequence[google.api.metric_pb2.MetricDescriptor]): + The metric descriptors that are available to the project and + that match the value of ``filter``, if present. 
+ next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. + """ + + @property + def raw_page(self): + return self + + metric_descriptors = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=metric_pb2.MetricDescriptor, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetMetricDescriptorRequest(proto.Message): + r"""The ``GetMetricDescriptor`` request. + Attributes: + name (str): + Required. The metric descriptor on which to execute the + request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] + + An example value of ``[METRIC_ID]`` is + ``"compute.googleapis.com/instance/disk/read_bytes_count"``. + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateMetricDescriptorRequest(proto.Message): + r"""The ``CreateMetricDescriptor`` request. + Attributes: + name (str): + Required. The + `project `__ + on which to execute the request. The format is: 4 + projects/[PROJECT_ID_OR_NUMBER] + metric_descriptor (google.api.metric_pb2.MetricDescriptor): + Required. The new `custom + metric `__ + descriptor. + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + metric_descriptor = proto.Field( + proto.MESSAGE, + number=2, + message=metric_pb2.MetricDescriptor, + ) + + +class DeleteMetricDescriptorRequest(proto.Message): + r"""The ``DeleteMetricDescriptor`` request. + Attributes: + name (str): + Required. The metric descriptor on which to execute the + request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] + + An example of ``[METRIC_ID]`` is: + ``"custom.googleapis.com/my_test_metric"``. + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTimeSeriesRequest(proto.Message): + r"""The ``ListTimeSeries`` request. + Attributes: + name (str): + Required. The + `project `__, + organization or folder on which to execute the request. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + organizations/[ORGANIZATION_ID] + folders/[FOLDER_ID] + filter (str): + Required. A `monitoring + filter `__ + that specifies which time series should be returned. The + filter must specify a single metric type, and can + additionally specify metric labels and other information. + For example: + + :: + + metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND + metric.labels.instance_name = "my-instance-name". + interval (google.cloud.monitoring_v3.types.TimeInterval): + Required. The time interval for which results + should be returned. Only time series that + contain data points in the specified interval + are included in the response. + aggregation (google.cloud.monitoring_v3.types.Aggregation): + Specifies the alignment of data points in individual time + series as well as how to combine the retrieved time series + across specified labels. + + By default (if no ``aggregation`` is explicitly specified), + the raw time series data is returned. + secondary_aggregation (google.cloud.monitoring_v3.types.Aggregation): + Apply a second aggregation after ``aggregation`` is applied. + May only be specified if ``aggregation`` is specified. + order_by (str): + Unsupported: must be left blank. The points + in each time series are currently returned in + reverse time order (most recent to oldest). 
+ view (google.cloud.monitoring_v3.types.ListTimeSeriesRequest.TimeSeriesView): + Required. Specifies which information is + returned about the time series. + page_size (int): + A positive number that is the maximum number of results to + return. If ``page_size`` is empty or more than 100,000 + results, the effective ``page_size`` is 100,000 results. If + ``view`` is set to ``FULL``, this is the maximum number of + ``Points`` returned. If ``view`` is set to ``HEADERS``, this + is the maximum number of ``TimeSeries`` returned. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return + additional results from the previous method call. + """ + class TimeSeriesView(proto.Enum): + r"""Controls which fields are returned by ``ListTimeSeries``.""" + FULL = 0 + HEADERS = 1 + + name = proto.Field( + proto.STRING, + number=10, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + interval = proto.Field( + proto.MESSAGE, + number=4, + message=common.TimeInterval, + ) + aggregation = proto.Field( + proto.MESSAGE, + number=5, + message=common.Aggregation, + ) + secondary_aggregation = proto.Field( + proto.MESSAGE, + number=11, + message=common.Aggregation, + ) + order_by = proto.Field( + proto.STRING, + number=6, + ) + view = proto.Field( + proto.ENUM, + number=7, + enum=TimeSeriesView, + ) + page_size = proto.Field( + proto.INT32, + number=8, + ) + page_token = proto.Field( + proto.STRING, + number=9, + ) + + +class ListTimeSeriesResponse(proto.Message): + r"""The ``ListTimeSeries`` response. + Attributes: + time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): + One or more time series that match the filter + included in the request. + next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. + execution_errors (Sequence[google.rpc.status_pb2.Status]): + Query execution errors that may have caused + the time series data returned to be incomplete. + unit (str): + The unit in which all ``time_series`` point values are + reported. ``unit`` follows the UCUM format for units as seen + in https://unitsofmeasure.org/ucum.html. If different + ``time_series`` have different units (for example, because + they come from different metric types, or a unit is absent), + then ``unit`` will be "{not_a_unit}". + """ + + @property + def raw_page(self): + return self + + time_series = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gm_metric.TimeSeries, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + execution_errors = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + unit = proto.Field( + proto.STRING, + number=5, + ) + + +class CreateTimeSeriesRequest(proto.Message): + r"""The ``CreateTimeSeries`` request. + Attributes: + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): + Required. The new data to be added to a list of time series. + Adds at most one data point to each of several time series. + The new data point must be more recent than any other point + in its time series. 
Each ``TimeSeries`` value must fully + specify a unique time series by supplying all label values + for the metric and the monitored resource. + + The maximum number of ``TimeSeries`` objects per ``Create`` + request is 200. + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + time_series = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gm_metric.TimeSeries, + ) + + +class CreateTimeSeriesError(proto.Message): + r"""DEPRECATED. Used to hold per-time-series error status. + Attributes: + time_series (google.cloud.monitoring_v3.types.TimeSeries): + DEPRECATED. Time series ID that resulted in the ``status`` + error. + status (google.rpc.status_pb2.Status): + DEPRECATED. The status of the requested write operation for + ``time_series``. + """ + + time_series = proto.Field( + proto.MESSAGE, + number=1, + message=gm_metric.TimeSeries, + ) + status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class CreateTimeSeriesSummary(proto.Message): + r"""Summary of the result of a failed request to write data to a + time series. + + Attributes: + total_point_count (int): + The number of points in the request. + success_point_count (int): + The number of points that were successfully + written. + errors (Sequence[google.cloud.monitoring_v3.types.CreateTimeSeriesSummary.Error]): + The number of points that failed to be + written. Order is not guaranteed. + """ + + class Error(proto.Message): + r"""Detailed information about an error category. + Attributes: + status (google.rpc.status_pb2.Status): + The status of the requested write operation. + point_count (int): + The number of points that couldn't be written because of + ``status``. + """ + + status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + point_count = proto.Field( + proto.INT32, + number=2, + ) + + total_point_count = proto.Field( + proto.INT32, + number=1, + ) + success_point_count = proto.Field( + proto.INT32, + number=2, + ) + errors = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Error, + ) + + +class QueryTimeSeriesRequest(proto.Message): + r"""The ``QueryTimeSeries`` request. + Attributes: + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + query (str): + Required. The query in the `Monitoring Query + Language `__ + format. The default time zone is in UTC. + page_size (int): + A positive number that is the maximum number of + time_series_data to return. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return + additional results from the previous method call. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + query = proto.Field( + proto.STRING, + number=7, + ) + page_size = proto.Field( + proto.INT32, + number=9, + ) + page_token = proto.Field( + proto.STRING, + number=10, + ) + + +class QueryTimeSeriesResponse(proto.Message): + r"""The ``QueryTimeSeries`` response. + Attributes: + time_series_descriptor (google.cloud.monitoring_v3.types.TimeSeriesDescriptor): + The descriptor for the time series data. + time_series_data (Sequence[google.cloud.monitoring_v3.types.TimeSeriesData]): + The time series data. + next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. 
To see the additional + results, use that value as ``page_token`` in the next call + to this method. + partial_errors (Sequence[google.rpc.status_pb2.Status]): + Query execution errors that may have caused + the time series data returned to be incomplete. + The available data will be available in the + response. + """ + + @property + def raw_page(self): + return self + + time_series_descriptor = proto.Field( + proto.MESSAGE, + number=8, + message=gm_metric.TimeSeriesDescriptor, + ) + time_series_data = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=gm_metric.TimeSeriesData, + ) + next_page_token = proto.Field( + proto.STRING, + number=10, + ) + partial_errors = proto.RepeatedField( + proto.MESSAGE, + number=11, + message=status_pb2.Status, + ) + + +class QueryErrorList(proto.Message): + r"""This is an error detail intended to be used with INVALID_ARGUMENT + errors. + + Attributes: + errors (Sequence[google.cloud.monitoring_v3.types.QueryError]): + Errors in parsing the time series query + language text. The number of errors in the + response may be limited. + error_summary (str): + A summary of all the errors. + """ + + errors = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gm_metric.QueryError, + ) + error_summary = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/mutation_record.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/mutation_record.py new file mode 100644 index 00000000..2739618c --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/mutation_record.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'MutationRecord', + }, +) + + +class MutationRecord(proto.Message): + r"""Describes a change made to a configuration. + Attributes: + mutate_time (google.protobuf.timestamp_pb2.Timestamp): + When the change occurred. + mutated_by (str): + The email address of the user making the + change. + """ + + mutate_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + mutated_by = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification.py new file mode 100644 index 00000000..b4f8fdf1 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification.py @@ -0,0 +1,256 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.cloud.monitoring_v3.types import mutation_record +from google.protobuf import wrappers_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'NotificationChannelDescriptor', + 'NotificationChannel', + }, +) + + +class NotificationChannelDescriptor(proto.Message): + r"""A description of a notification channel. The descriptor + includes the properties of the channel and the set of labels or + fields that must be specified to configure channels of a given + type. + + Attributes: + name (str): + The full REST resource name for this descriptor. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannelDescriptors/[TYPE] + + In the above, ``[TYPE]`` is the value of the ``type`` field. + type_ (str): + The type of notification channel, such as "email" and "sms". + To view the full list of channels, see `Channel + descriptors `__. + Notification channel types are globally unique. + display_name (str): + A human-readable name for the notification + channel type. This form of the name is suitable + for a user interface. + description (str): + A human-readable description of the + notification channel type. The description may + include a description of the properties of the + channel and pointers to external documentation. + labels (Sequence[google.api.label_pb2.LabelDescriptor]): + The set of labels that must be defined to + identify a particular channel of the + corresponding type. Each label includes a + description for how that field should be + populated. + launch_stage (google.api.launch_stage_pb2.LaunchStage): + The product launch stage for channels of this + type. + """ + + name = proto.Field( + proto.STRING, + number=6, + ) + type_ = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + labels = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=label_pb2.LabelDescriptor, + ) + launch_stage = proto.Field( + proto.ENUM, + number=7, + enum=launch_stage_pb2.LaunchStage, + ) + + +class NotificationChannel(proto.Message): + r"""A ``NotificationChannel`` is a medium through which an alert is + delivered when a policy violation is detected. Examples of channels + include email, SMS, and third-party messaging applications. Fields + containing sensitive information like authentication tokens or + contact info are only partially populated on retrieval. + + Attributes: + type_ (str): + The type of the notification channel. This field matches the + value of the + [NotificationChannelDescriptor.type][google.monitoring.v3.NotificationChannelDescriptor.type] + field. + name (str): + The full REST resource name for this channel. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] + + The ``[CHANNEL_ID]`` is automatically assigned by the server + on creation. 
+ display_name (str): + An optional human-readable name for this + notification channel. It is recommended that you + specify a non-empty and unique name in order to + make it easier to identify the channels in your + project, though this is not enforced. The + display name is limited to 512 Unicode + characters. + description (str): + An optional human-readable description of + this notification channel. This description may + provide additional details, beyond the display + name, for the channel. This may not exceed 1024 + Unicode characters. + labels (Sequence[google.cloud.monitoring_v3.types.NotificationChannel.LabelsEntry]): + Configuration fields that define the channel and its + behavior. The permissible and required labels are specified + in the + [NotificationChannelDescriptor.labels][google.monitoring.v3.NotificationChannelDescriptor.labels] + of the ``NotificationChannelDescriptor`` corresponding to + the ``type`` field. + user_labels (Sequence[google.cloud.monitoring_v3.types.NotificationChannel.UserLabelsEntry]): + User-supplied key/value data that does not need to conform + to the corresponding ``NotificationChannelDescriptor``'s + schema, unlike the ``labels`` field. This field is intended + to be used for organizing and identifying the + ``NotificationChannel`` objects. + + The field can contain up to 64 entries. Each key and value + is limited to 63 Unicode characters or 128 bytes, whichever + is smaller. Labels and values can contain only lowercase + letters, numerals, underscores, and dashes. Keys must begin + with a letter. + verification_status (google.cloud.monitoring_v3.types.NotificationChannel.VerificationStatus): + Indicates whether this channel has been verified or not. On + a + [``ListNotificationChannels``][google.monitoring.v3.NotificationChannelService.ListNotificationChannels] + or + [``GetNotificationChannel``][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] + operation, this field is expected to be populated. + + If the value is ``UNVERIFIED``, then it indicates that the + channel is non-functioning (it both requires verification + and lacks verification); otherwise, it is assumed that the + channel works. + + If the channel is neither ``VERIFIED`` nor ``UNVERIFIED``, + it implies that the channel is of a type that does not + require verification or that this specific channel has been + exempted from verification because it was created prior to + verification being required for channels of this type. + + This field cannot be modified using a standard + [``UpdateNotificationChannel``][google.monitoring.v3.NotificationChannelService.UpdateNotificationChannel] + operation. To change the value of this field, you must call + [``VerifyNotificationChannel``][google.monitoring.v3.NotificationChannelService.VerifyNotificationChannel]. + enabled (google.protobuf.wrappers_pb2.BoolValue): + Whether notifications are forwarded to the + described channel. This makes it possible to + disable delivery of notifications to a + particular channel without removing the channel + from all alerting policies that reference the + channel. This is a more convenient approach when + the change is temporary and you want to receive + notifications from the same set of alerting + policies on the channel at some point in the + future. + creation_record (google.cloud.monitoring_v3.types.MutationRecord): + Record of the creation of this channel. 
+ mutation_records (Sequence[google.cloud.monitoring_v3.types.MutationRecord]): + Records of the modification of this channel. + """ + class VerificationStatus(proto.Enum): + r"""Indicates whether the channel has been verified or not. It is + illegal to specify this field in a + [``CreateNotificationChannel``][google.monitoring.v3.NotificationChannelService.CreateNotificationChannel] + or an + [``UpdateNotificationChannel``][google.monitoring.v3.NotificationChannelService.UpdateNotificationChannel] + operation. + """ + VERIFICATION_STATUS_UNSPECIFIED = 0 + UNVERIFIED = 1 + VERIFIED = 2 + + type_ = proto.Field( + proto.STRING, + number=1, + ) + name = proto.Field( + proto.STRING, + number=6, + ) + display_name = proto.Field( + proto.STRING, + number=3, + ) + description = proto.Field( + proto.STRING, + number=4, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + user_labels = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + verification_status = proto.Field( + proto.ENUM, + number=9, + enum=VerificationStatus, + ) + enabled = proto.Field( + proto.MESSAGE, + number=11, + message=wrappers_pb2.BoolValue, + ) + creation_record = proto.Field( + proto.MESSAGE, + number=12, + message=mutation_record.MutationRecord, + ) + mutation_records = proto.RepeatedField( + proto.MESSAGE, + number=13, + message=mutation_record.MutationRecord, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification_service.py new file mode 100644 index 00000000..418262f2 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification_service.py @@ -0,0 +1,445 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.monitoring_v3.types import notification +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'ListNotificationChannelDescriptorsRequest', + 'ListNotificationChannelDescriptorsResponse', + 'GetNotificationChannelDescriptorRequest', + 'CreateNotificationChannelRequest', + 'ListNotificationChannelsRequest', + 'ListNotificationChannelsResponse', + 'GetNotificationChannelRequest', + 'UpdateNotificationChannelRequest', + 'DeleteNotificationChannelRequest', + 'SendNotificationChannelVerificationCodeRequest', + 'GetNotificationChannelVerificationCodeRequest', + 'GetNotificationChannelVerificationCodeResponse', + 'VerifyNotificationChannelRequest', + }, +) + + +class ListNotificationChannelDescriptorsRequest(proto.Message): + r"""The ``ListNotificationChannelDescriptors`` request. + Attributes: + name (str): + Required. The REST resource name of the parent from which to + retrieve the notification channel descriptors. 
The expected + syntax is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + Note that this + `names `__ + the parent container in which to look for the descriptors; + to retrieve a single descriptor by name, use the + [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] + operation, instead. + page_size (int): + The maximum number of results to return in a + single response. If not set to a positive + number, a reasonable value will be chosen by the + service. + page_token (str): + If non-empty, ``page_token`` must contain a value returned + as the ``next_page_token`` in a previous response to request + the next set of results. + """ + + name = proto.Field( + proto.STRING, + number=4, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class ListNotificationChannelDescriptorsResponse(proto.Message): + r"""The ``ListNotificationChannelDescriptors`` response. + Attributes: + channel_descriptors (Sequence[google.cloud.monitoring_v3.types.NotificationChannelDescriptor]): + The monitored resource descriptors supported + for the specified project, optionally filtered. + next_page_token (str): + If not empty, indicates that there may be more results that + match the request. Use the value in the ``page_token`` field + in a subsequent request to fetch the next set of results. If + empty, all results have been returned. + """ + + @property + def raw_page(self): + return self + + channel_descriptors = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=notification.NotificationChannelDescriptor, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetNotificationChannelDescriptorRequest(proto.Message): + r"""The ``GetNotificationChannelDescriptor`` response. + Attributes: + name (str): + Required. The channel type for which to execute the request. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannelDescriptors/[CHANNEL_TYPE] + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateNotificationChannelRequest(proto.Message): + r"""The ``CreateNotificationChannel`` request. + Attributes: + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This names the container into which the channel will be + written, this does not name the newly created channel. The + resulting channel's name will have a normalized version of + this field as a prefix, but will add + ``/notificationChannels/[CHANNEL_ID]`` to identify the + channel. + notification_channel (google.cloud.monitoring_v3.types.NotificationChannel): + Required. The definition of the ``NotificationChannel`` to + create. + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + notification_channel = proto.Field( + proto.MESSAGE, + number=2, + message=notification.NotificationChannel, + ) + + +class ListNotificationChannelsRequest(proto.Message): + r"""The ``ListNotificationChannels`` request. + Attributes: + name (str): + Required. The + `project `__ + on which to execute the request. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + This names the container in which to look for the + notification channels; it does not name a specific channel. + To query a specific channel by REST resource name, use the + [``GetNotificationChannel``][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] + operation. 
+ filter (str): + If provided, this field specifies the criteria that must be + met by notification channels to be included in the response. + + For more details, see `sorting and + filtering `__. + order_by (str): + A comma-separated list of fields by which to sort the + result. Supports the same set of fields as in ``filter``. + Entries can be prefixed with a minus sign to sort in + descending rather than ascending order. + + For more details, see `sorting and + filtering `__. + page_size (int): + The maximum number of results to return in a + single response. If not set to a positive + number, a reasonable value will be chosen by the + service. + page_token (str): + If non-empty, ``page_token`` must contain a value returned + as the ``next_page_token`` in a previous response to request + the next set of results. + """ + + name = proto.Field( + proto.STRING, + number=5, + ) + filter = proto.Field( + proto.STRING, + number=6, + ) + order_by = proto.Field( + proto.STRING, + number=7, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListNotificationChannelsResponse(proto.Message): + r"""The ``ListNotificationChannels`` response. + Attributes: + notification_channels (Sequence[google.cloud.monitoring_v3.types.NotificationChannel]): + The notification channels defined for the + specified project. + next_page_token (str): + If not empty, indicates that there may be more results that + match the request. Use the value in the ``page_token`` field + in a subsequent request to fetch the next set of results. If + empty, all results have been returned. + total_size (int): + The total number of notification channels in + all pages. This number is only an estimate, and + may change in subsequent pages. + https://aip.dev/158 + """ + + @property + def raw_page(self): + return self + + notification_channels = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=notification.NotificationChannel, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + total_size = proto.Field( + proto.INT32, + number=4, + ) + + +class GetNotificationChannelRequest(proto.Message): + r"""The ``GetNotificationChannel`` request. + Attributes: + name (str): + Required. The channel for which to execute the request. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateNotificationChannelRequest(proto.Message): + r"""The ``UpdateNotificationChannel`` request. + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to update. + notification_channel (google.cloud.monitoring_v3.types.NotificationChannel): + Required. A description of the changes to be applied to the + specified notification channel. The description must provide + a definition for fields to be updated; the names of these + fields should also be included in the ``update_mask``. + """ + + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + notification_channel = proto.Field( + proto.MESSAGE, + number=3, + message=notification.NotificationChannel, + ) + + +class DeleteNotificationChannelRequest(proto.Message): + r"""The ``DeleteNotificationChannel`` request. + Attributes: + name (str): + Required. The channel for which to execute the request. 
The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] + force (bool): + If true, the notification channel will be + deleted regardless of its use in alert policies + (the policies will be updated to remove the + channel). If false, channels that are still + referenced by an existing alerting policy will + fail to be deleted in a delete operation. + """ + + name = proto.Field( + proto.STRING, + number=3, + ) + force = proto.Field( + proto.BOOL, + number=5, + ) + + +class SendNotificationChannelVerificationCodeRequest(proto.Message): + r"""The ``SendNotificationChannelVerificationCode`` request. + Attributes: + name (str): + Required. The notification channel to which + to send a verification code. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class GetNotificationChannelVerificationCodeRequest(proto.Message): + r"""The ``GetNotificationChannelVerificationCode`` request. + Attributes: + name (str): + Required. The notification channel for which + a verification code is to be generated and + retrieved. This must name a channel that is + already verified; if the specified channel is + not verified, the request will fail. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + The desired expiration time. If specified, + the API will guarantee that the returned code + will not be valid after the specified timestamp; + however, the API cannot guarantee that the + returned code will be valid for at least as long + as the requested time (the API puts an upper + bound on the amount of time for which a code may + be valid). If omitted, a default expiration will + be used, which may be less than the max + permissible expiration (so specifying an + expiration may extend the code's lifetime over + omitting an expiration, even though the API does + impose an upper limit on the maximum expiration + that is permitted). + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + expire_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class GetNotificationChannelVerificationCodeResponse(proto.Message): + r"""The ``GetNotificationChannelVerificationCode`` request. + Attributes: + code (str): + The verification code, which may be used to + verify other channels that have an equivalent + identity (i.e. other channels of the same type + with the same fingerprint such as other email + channels with the same email address or other + sms channels with the same number). + expire_time (google.protobuf.timestamp_pb2.Timestamp): + The expiration time associated with the code + that was returned. If an expiration was provided + in the request, this is the minimum of the + requested expiration in the request and the max + permitted expiration. + """ + + code = proto.Field( + proto.STRING, + number=1, + ) + expire_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class VerifyNotificationChannelRequest(proto.Message): + r"""The ``VerifyNotificationChannel`` request. + Attributes: + name (str): + Required. The notification channel to verify. + code (str): + Required. The verification code that was delivered to the + channel as a result of invoking the + ``SendNotificationChannelVerificationCode`` API method or + that was retrieved from a verified channel via + ``GetNotificationChannelVerificationCode``. 
For example, one + might have "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" (in + general, one is only guaranteed that the code is valid + UTF-8; one should not make any assumptions regarding the + structure or format of the code). + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + code = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/query_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/query_service.py new file mode 100644 index 00000000..0ea6c2d0 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/query_service.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + }, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service.py new file mode 100644 index 00000000..3a590100 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service.py @@ -0,0 +1,775 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.type import calendar_period_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'Service', + 'ServiceLevelObjective', + 'ServiceLevelIndicator', + 'BasicSli', + 'Range', + 'RequestBasedSli', + 'TimeSeriesRatio', + 'DistributionCut', + 'WindowsBasedSli', + }, +) + + +class Service(proto.Message): + r"""A ``Service`` is a discrete, autonomous, and network-accessible + unit, designed to solve an individual concern + (`Wikipedia `__). + In Cloud Monitoring, a ``Service`` acts as the root resource under + which operational aspects of the service are accessible. + + Attributes: + name (str): + Resource name for this Service. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + display_name (str): + Name used for UI elements listing this + Service. + custom (google.cloud.monitoring_v3.types.Service.Custom): + Custom service type. + app_engine (google.cloud.monitoring_v3.types.Service.AppEngine): + Type used for App Engine services. 
+ cloud_endpoints (google.cloud.monitoring_v3.types.Service.CloudEndpoints): + Type used for Cloud Endpoints services. + cluster_istio (google.cloud.monitoring_v3.types.Service.ClusterIstio): + Type used for Istio services that live in a + Kubernetes cluster. + mesh_istio (google.cloud.monitoring_v3.types.Service.MeshIstio): + Type used for Istio services scoped to an + Istio mesh. + istio_canonical_service (google.cloud.monitoring_v3.types.Service.IstioCanonicalService): + Type used for canonical services scoped to an Istio mesh. + Metrics for Istio are `documented + here `__ + telemetry (google.cloud.monitoring_v3.types.Service.Telemetry): + Configuration for how to query telemetry on a + Service. + user_labels (Sequence[google.cloud.monitoring_v3.types.Service.UserLabelsEntry]): + Labels which have been used to annotate the + service. Label keys must start with a letter. + Label keys and values may contain lowercase + letters, numbers, underscores, and dashes. Label + keys and values have a maximum length of 63 + characters, and must be less than 128 bytes in + size. Up to 64 label entries may be stored. For + labels which do not have a semantic value, the + empty string may be supplied for the label + value. + """ + + class Custom(proto.Message): + r"""Custom view of service telemetry. Currently a place-holder + pending final design. + """ + + class AppEngine(proto.Message): + r"""App Engine service. Learn more at + https://cloud.google.com/appengine. + + Attributes: + module_id (str): + The ID of the App Engine module underlying this service. + Corresponds to the ``module_id`` resource label in the + ``gae_app`` monitored resource: + https://cloud.google.com/monitoring/api/resources#tag_gae_app + """ + + module_id = proto.Field( + proto.STRING, + number=1, + ) + + class CloudEndpoints(proto.Message): + r"""Cloud Endpoints service. Learn more at + https://cloud.google.com/endpoints. + + Attributes: + service (str): + The name of the Cloud Endpoints service underlying this + service. Corresponds to the ``service`` resource label in + the ``api`` monitored resource: + https://cloud.google.com/monitoring/api/resources#tag_api + """ + + service = proto.Field( + proto.STRING, + number=1, + ) + + class ClusterIstio(proto.Message): + r"""Istio service scoped to a single Kubernetes cluster. Learn + more at https://istio.io. Clusters running OSS Istio will have + their services ingested as this type. + + Attributes: + location (str): + The location of the Kubernetes cluster in which this Istio + service is defined. Corresponds to the ``location`` resource + label in ``k8s_cluster`` resources. + cluster_name (str): + The name of the Kubernetes cluster in which this Istio + service is defined. Corresponds to the ``cluster_name`` + resource label in ``k8s_cluster`` resources. + service_namespace (str): + The namespace of the Istio service underlying this service. + Corresponds to the ``destination_service_namespace`` metric + label in Istio metrics. + service_name (str): + The name of the Istio service underlying this service. + Corresponds to the ``destination_service_name`` metric label + in Istio metrics. + """ + + location = proto.Field( + proto.STRING, + number=1, + ) + cluster_name = proto.Field( + proto.STRING, + number=2, + ) + service_namespace = proto.Field( + proto.STRING, + number=3, + ) + service_name = proto.Field( + proto.STRING, + number=4, + ) + + class MeshIstio(proto.Message): + r"""Istio service scoped to an Istio mesh. 
Anthos clusters + running ASM < 1.6.8 will have their services ingested as this + type. + + Attributes: + mesh_uid (str): + Identifier for the mesh in which this Istio service is + defined. Corresponds to the ``mesh_uid`` metric label in + Istio metrics. + service_namespace (str): + The namespace of the Istio service underlying this service. + Corresponds to the ``destination_service_namespace`` metric + label in Istio metrics. + service_name (str): + The name of the Istio service underlying this service. + Corresponds to the ``destination_service_name`` metric label + in Istio metrics. + """ + + mesh_uid = proto.Field( + proto.STRING, + number=1, + ) + service_namespace = proto.Field( + proto.STRING, + number=3, + ) + service_name = proto.Field( + proto.STRING, + number=4, + ) + + class IstioCanonicalService(proto.Message): + r"""Canonical service scoped to an Istio mesh. Anthos clusters + running ASM >= 1.6.8 will have their services ingested as this + type. + + Attributes: + mesh_uid (str): + Identifier for the Istio mesh in which this canonical + service is defined. Corresponds to the ``mesh_uid`` metric + label in `Istio + metrics `__. + canonical_service_namespace (str): + The namespace of the canonical service underlying this + service. Corresponds to the + ``destination_canonical_service_namespace`` metric label in + `Istio + metrics `__. + canonical_service (str): + The name of the canonical service underlying this service. + Corresponds to the ``destination_canonical_service_name`` + metric label in label in `Istio + metrics `__. + """ + + mesh_uid = proto.Field( + proto.STRING, + number=1, + ) + canonical_service_namespace = proto.Field( + proto.STRING, + number=3, + ) + canonical_service = proto.Field( + proto.STRING, + number=4, + ) + + class Telemetry(proto.Message): + r"""Configuration for how to query telemetry on a Service. + Attributes: + resource_name (str): + The full name of the resource that defines this service. + Formatted as described in + https://cloud.google.com/apis/design/resource_names. + """ + + resource_name = proto.Field( + proto.STRING, + number=1, + ) + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + custom = proto.Field( + proto.MESSAGE, + number=6, + oneof='identifier', + message=Custom, + ) + app_engine = proto.Field( + proto.MESSAGE, + number=7, + oneof='identifier', + message=AppEngine, + ) + cloud_endpoints = proto.Field( + proto.MESSAGE, + number=8, + oneof='identifier', + message=CloudEndpoints, + ) + cluster_istio = proto.Field( + proto.MESSAGE, + number=9, + oneof='identifier', + message=ClusterIstio, + ) + mesh_istio = proto.Field( + proto.MESSAGE, + number=10, + oneof='identifier', + message=MeshIstio, + ) + istio_canonical_service = proto.Field( + proto.MESSAGE, + number=11, + oneof='identifier', + message=IstioCanonicalService, + ) + telemetry = proto.Field( + proto.MESSAGE, + number=13, + message=Telemetry, + ) + user_labels = proto.MapField( + proto.STRING, + proto.STRING, + number=14, + ) + + +class ServiceLevelObjective(proto.Message): + r"""A Service-Level Objective (SLO) describes a level of desired + good service. It consists of a service-level indicator (SLI), a + performance goal, and a period over which the objective is to be + evaluated against that goal. The SLO can use SLIs defined in a + number of different manners. 
Typical SLOs might include "99% of + requests in each rolling week have latency below 200 + milliseconds" or "99.5% of requests in each calendar month + return successfully." + + Attributes: + name (str): + Resource name for this ``ServiceLevelObjective``. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] + display_name (str): + Name used for UI elements listing this SLO. + service_level_indicator (google.cloud.monitoring_v3.types.ServiceLevelIndicator): + The definition of good service, used to measure and + calculate the quality of the ``Service``'s performance with + respect to a single aspect of service quality. + goal (float): + The fraction of service that must be good in order for this + objective to be met. ``0 < goal <= 0.999``. + rolling_period (google.protobuf.duration_pb2.Duration): + A rolling time period, semantically "in the past + ````". Must be an integer multiple of 1 day + no larger than 30 days. + calendar_period (google.type.calendar_period_pb2.CalendarPeriod): + A calendar period, semantically "since the start of the + current ````". At this time, only ``DAY``, + ``WEEK``, ``FORTNIGHT``, and ``MONTH`` are supported. + user_labels (Sequence[google.cloud.monitoring_v3.types.ServiceLevelObjective.UserLabelsEntry]): + Labels which have been used to annotate the + service-level objective. Label keys must start + with a letter. Label keys and values may contain + lowercase letters, numbers, underscores, and + dashes. Label keys and values have a maximum + length of 63 characters, and must be less than + 128 bytes in size. Up to 64 label entries may be + stored. For labels which do not have a semantic + value, the empty string may be supplied for the + label value. + """ + class View(proto.Enum): + r"""``ServiceLevelObjective.View`` determines what form of + ``ServiceLevelObjective`` is returned from + ``GetServiceLevelObjective``, ``ListServiceLevelObjectives``, and + ``ListServiceLevelObjectiveVersions`` RPCs. + """ + VIEW_UNSPECIFIED = 0 + FULL = 2 + EXPLICIT = 1 + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=11, + ) + service_level_indicator = proto.Field( + proto.MESSAGE, + number=3, + message='ServiceLevelIndicator', + ) + goal = proto.Field( + proto.DOUBLE, + number=4, + ) + rolling_period = proto.Field( + proto.MESSAGE, + number=5, + oneof='period', + message=duration_pb2.Duration, + ) + calendar_period = proto.Field( + proto.ENUM, + number=6, + oneof='period', + enum=calendar_period_pb2.CalendarPeriod, + ) + user_labels = proto.MapField( + proto.STRING, + proto.STRING, + number=12, + ) + + +class ServiceLevelIndicator(proto.Message): + r"""A Service-Level Indicator (SLI) describes the "performance" of a + service. For some services, the SLI is well-defined. In such cases, + the SLI can be described easily by referencing the well-known SLI + and providing the needed parameters. Alternatively, a "custom" SLI + can be defined with a query to the underlying metric store. An SLI + is defined to be ``good_service / total_service`` over any queried + time interval. The value of performance always falls into the range + ``0 <= performance <= 1``. A custom SLI describes how to compute + this ratio, whether this is by dividing values from a pair of time + series, cutting a ``Distribution`` into good and bad counts, or + counting time windows in which the service complies with a + criterion. 
For separation of concerns, a single Service-Level + Indicator measures performance for only one aspect of service + quality, such as fraction of successful queries or fast-enough + queries. + + Attributes: + basic_sli (google.cloud.monitoring_v3.types.BasicSli): + Basic SLI on a well-known service type. + request_based (google.cloud.monitoring_v3.types.RequestBasedSli): + Request-based SLIs + windows_based (google.cloud.monitoring_v3.types.WindowsBasedSli): + Windows-based SLIs + """ + + basic_sli = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message='BasicSli', + ) + request_based = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='RequestBasedSli', + ) + windows_based = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='WindowsBasedSli', + ) + + +class BasicSli(proto.Message): + r"""An SLI measuring performance on a well-known service type. + Performance will be computed on the basis of pre-defined metrics. + The type of the ``service_resource`` determines the metrics to use + and the ``service_resource.labels`` and ``metric_labels`` are used + to construct a monitoring filter to filter that metric down to just + the data relevant to this service. + + Attributes: + method (Sequence[str]): + OPTIONAL: The set of RPCs to which this SLI + is relevant. Telemetry from other methods will + not be used to calculate performance for this + SLI. If omitted, this SLI applies to all the + Service's methods. For service types that don't + support breaking down by method, setting this + field will result in an error. + location (Sequence[str]): + OPTIONAL: The set of locations to which this + SLI is relevant. Telemetry from other locations + will not be used to calculate performance for + this SLI. If omitted, this SLI applies to all + locations in which the Service has activity. For + service types that don't support breaking down + by location, setting this field will result in + an error. + version (Sequence[str]): + OPTIONAL: The set of API versions to which + this SLI is relevant. Telemetry from other API + versions will not be used to calculate + performance for this SLI. If omitted, this SLI + applies to all API versions. For service types + that don't support breaking down by version, + setting this field will result in an error. + availability (google.cloud.monitoring_v3.types.BasicSli.AvailabilityCriteria): + Good service is defined to be the count of + requests made to this service that return + successfully. + latency (google.cloud.monitoring_v3.types.BasicSli.LatencyCriteria): + Good service is defined to be the count of requests made to + this service that are fast enough with respect to + ``latency.threshold``. + """ + + class AvailabilityCriteria(proto.Message): + r"""Future parameters for the availability SLI. """ + + class LatencyCriteria(proto.Message): + r"""Parameters for a latency threshold SLI. + Attributes: + threshold (google.protobuf.duration_pb2.Duration): + Good service is defined to be the count of requests made to + this service that return in no more than ``threshold``. 
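+
+        For illustration only, a latency-based ``BasicSli`` might be
+        assembled from these types roughly as follows; the 300 ms target
+        is an assumed value::
+
+            from google.cloud import monitoring_v3
+            from google.protobuf import duration_pb2
+
+            # Good service: requests answered within 300 milliseconds.
+            sli = monitoring_v3.BasicSli(
+                latency=monitoring_v3.BasicSli.LatencyCriteria(
+                    threshold=duration_pb2.Duration(nanos=300 * 1000 * 1000),
+                ),
+            )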
+ """ + + threshold = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + method = proto.RepeatedField( + proto.STRING, + number=7, + ) + location = proto.RepeatedField( + proto.STRING, + number=8, + ) + version = proto.RepeatedField( + proto.STRING, + number=9, + ) + availability = proto.Field( + proto.MESSAGE, + number=2, + oneof='sli_criteria', + message=AvailabilityCriteria, + ) + latency = proto.Field( + proto.MESSAGE, + number=3, + oneof='sli_criteria', + message=LatencyCriteria, + ) + + +class Range(proto.Message): + r"""Range of numerical values within ``min`` and ``max``. + Attributes: + min_ (float): + Range minimum. + max_ (float): + Range maximum. + """ + + min_ = proto.Field( + proto.DOUBLE, + number=1, + ) + max_ = proto.Field( + proto.DOUBLE, + number=2, + ) + + +class RequestBasedSli(proto.Message): + r"""Service Level Indicators for which atomic units of service + are counted directly. + + Attributes: + good_total_ratio (google.cloud.monitoring_v3.types.TimeSeriesRatio): + ``good_total_ratio`` is used when the ratio of + ``good_service`` to ``total_service`` is computed from two + ``TimeSeries``. + distribution_cut (google.cloud.monitoring_v3.types.DistributionCut): + ``distribution_cut`` is used when ``good_service`` is a + count of values aggregated in a ``Distribution`` that fall + into a good range. The ``total_service`` is the total count + of all values aggregated in the ``Distribution``. + """ + + good_total_ratio = proto.Field( + proto.MESSAGE, + number=1, + oneof='method', + message='TimeSeriesRatio', + ) + distribution_cut = proto.Field( + proto.MESSAGE, + number=3, + oneof='method', + message='DistributionCut', + ) + + +class TimeSeriesRatio(proto.Message): + r"""A ``TimeSeriesRatio`` specifies two ``TimeSeries`` to use for + computing the ``good_service / total_service`` ratio. The specified + ``TimeSeries`` must have ``ValueType = DOUBLE`` or + ``ValueType = INT64`` and must have ``MetricKind = DELTA`` or + ``MetricKind = CUMULATIVE``. The ``TimeSeriesRatio`` must specify + exactly two of good, bad, and total, and the relationship + ``good_service + bad_service = total_service`` will be assumed. + + Attributes: + good_service_filter (str): + A `monitoring + filter `__ + specifying a ``TimeSeries`` quantifying good service + provided. Must have ``ValueType = DOUBLE`` or + ``ValueType = INT64`` and must have ``MetricKind = DELTA`` + or ``MetricKind = CUMULATIVE``. + bad_service_filter (str): + A `monitoring + filter `__ + specifying a ``TimeSeries`` quantifying bad service, either + demanded service that was not provided or demanded service + that was of inadequate quality. Must have + ``ValueType = DOUBLE`` or ``ValueType = INT64`` and must + have ``MetricKind = DELTA`` or ``MetricKind = CUMULATIVE``. + total_service_filter (str): + A `monitoring + filter `__ + specifying a ``TimeSeries`` quantifying total demanded + service. Must have ``ValueType = DOUBLE`` or + ``ValueType = INT64`` and must have ``MetricKind = DELTA`` + or ``MetricKind = CUMULATIVE``. + """ + + good_service_filter = proto.Field( + proto.STRING, + number=4, + ) + bad_service_filter = proto.Field( + proto.STRING, + number=5, + ) + total_service_filter = proto.Field( + proto.STRING, + number=6, + ) + + +class DistributionCut(proto.Message): + r"""A ``DistributionCut`` defines a ``TimeSeries`` and thresholds used + for measuring good service and total service. 
The ``TimeSeries`` + must have ``ValueType = DISTRIBUTION`` and ``MetricKind = DELTA`` or + ``MetricKind = CUMULATIVE``. The computed ``good_service`` will be + the estimated count of values in the ``Distribution`` that fall + within the specified ``min`` and ``max``. + + Attributes: + distribution_filter (str): + A `monitoring + filter `__ + specifying a ``TimeSeries`` aggregating values. Must have + ``ValueType = DISTRIBUTION`` and ``MetricKind = DELTA`` or + ``MetricKind = CUMULATIVE``. + range_ (google.cloud.monitoring_v3.types.Range): + Range of values considered "good." For a one- + ided range, set one bound to an infinite value. + """ + + distribution_filter = proto.Field( + proto.STRING, + number=4, + ) + range_ = proto.Field( + proto.MESSAGE, + number=5, + message='Range', + ) + + +class WindowsBasedSli(proto.Message): + r"""A ``WindowsBasedSli`` defines ``good_service`` as the count of time + windows for which the provided service was of good quality. Criteria + for determining if service was good are embedded in the + ``window_criterion``. + + Attributes: + good_bad_metric_filter (str): + A `monitoring + filter `__ + specifying a ``TimeSeries`` with ``ValueType = BOOL``. The + window is good if any ``true`` values appear in the window. + good_total_ratio_threshold (google.cloud.monitoring_v3.types.WindowsBasedSli.PerformanceThreshold): + A window is good if its ``performance`` is high enough. + metric_mean_in_range (google.cloud.monitoring_v3.types.WindowsBasedSli.MetricRange): + A window is good if the metric's value is in + a good range, averaged across returned streams. + metric_sum_in_range (google.cloud.monitoring_v3.types.WindowsBasedSli.MetricRange): + A window is good if the metric's value is in + a good range, summed across returned streams. + window_period (google.protobuf.duration_pb2.Duration): + Duration over which window quality is evaluated. Must be an + integer fraction of a day and at least ``60s``. + """ + + class PerformanceThreshold(proto.Message): + r"""A ``PerformanceThreshold`` is used when each window is good when + that window has a sufficiently high ``performance``. + + Attributes: + performance (google.cloud.monitoring_v3.types.RequestBasedSli): + ``RequestBasedSli`` to evaluate to judge window quality. + basic_sli_performance (google.cloud.monitoring_v3.types.BasicSli): + ``BasicSli`` to evaluate to judge window quality. + threshold (float): + If window ``performance >= threshold``, the window is + counted as good. + """ + + performance = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='RequestBasedSli', + ) + basic_sli_performance = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='BasicSli', + ) + threshold = proto.Field( + proto.DOUBLE, + number=2, + ) + + class MetricRange(proto.Message): + r"""A ``MetricRange`` is used when each window is good when the value x + of a single ``TimeSeries`` satisfies + ``range.min <= x <= range.max``. The provided ``TimeSeries`` must + have ``ValueType = INT64`` or ``ValueType = DOUBLE`` and + ``MetricKind = GAUGE``. + + Attributes: + time_series (str): + A `monitoring + filter `__ + specifying the ``TimeSeries`` to use for evaluating window + quality. + range_ (google.cloud.monitoring_v3.types.Range): + Range of values considered "good." For a one- + ided range, set one bound to an infinite value. 
+ """ + + time_series = proto.Field( + proto.STRING, + number=1, + ) + range_ = proto.Field( + proto.MESSAGE, + number=4, + message='Range', + ) + + good_bad_metric_filter = proto.Field( + proto.STRING, + number=5, + oneof='window_criterion', + ) + good_total_ratio_threshold = proto.Field( + proto.MESSAGE, + number=2, + oneof='window_criterion', + message=PerformanceThreshold, + ) + metric_mean_in_range = proto.Field( + proto.MESSAGE, + number=6, + oneof='window_criterion', + message=MetricRange, + ) + metric_sum_in_range = proto.Field( + proto.MESSAGE, + number=7, + oneof='window_criterion', + message=MetricRange, + ) + window_period = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service_service.py new file mode 100644 index 00000000..b7f0c7a1 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service_service.py @@ -0,0 +1,416 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.monitoring_v3.types import service as gm_service +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'CreateServiceRequest', + 'GetServiceRequest', + 'ListServicesRequest', + 'ListServicesResponse', + 'UpdateServiceRequest', + 'DeleteServiceRequest', + 'CreateServiceLevelObjectiveRequest', + 'GetServiceLevelObjectiveRequest', + 'ListServiceLevelObjectivesRequest', + 'ListServiceLevelObjectivesResponse', + 'UpdateServiceLevelObjectiveRequest', + 'DeleteServiceLevelObjectiveRequest', + }, +) + + +class CreateServiceRequest(proto.Message): + r"""The ``CreateService`` request. + Attributes: + parent (str): + Required. Resource + `name `__ + of the parent workspace. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + service_id (str): + Optional. The Service id to use for this Service. If + omitted, an id will be generated instead. Must match the + pattern ``[a-z0-9\-]+`` + service (google.cloud.monitoring_v3.types.Service): + Required. The ``Service`` to create. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + service_id = proto.Field( + proto.STRING, + number=3, + ) + service = proto.Field( + proto.MESSAGE, + number=2, + message=gm_service.Service, + ) + + +class GetServiceRequest(proto.Message): + r"""The ``GetService`` request. + Attributes: + name (str): + Required. Resource name of the ``Service``. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListServicesRequest(proto.Message): + r"""The ``ListServices`` request. + Attributes: + parent (str): + Required. 
Resource name of the parent containing the listed + services, either a + `project `__ + or a Monitoring Workspace. The formats are: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + workspaces/[HOST_PROJECT_ID_OR_NUMBER] + filter (str): + A filter specifying what ``Service``\ s to return. The + filter currently supports the following fields: + + :: + + - `identifier_case` + - `app_engine.module_id` + - `cloud_endpoints.service` (reserved for future use) + - `mesh_istio.mesh_uid` + - `mesh_istio.service_namespace` + - `mesh_istio.service_name` + - `cluster_istio.location` (deprecated) + - `cluster_istio.cluster_name` (deprecated) + - `cluster_istio.service_namespace` (deprecated) + - `cluster_istio.service_name` (deprecated) + + ``identifier_case`` refers to which option in the identifier + oneof is populated. For example, the filter + ``identifier_case = "CUSTOM"`` would match all services with + a value for the ``custom`` field. Valid options are + "CUSTOM", "APP_ENGINE", "MESH_ISTIO", plus "CLUSTER_ISTIO" + (deprecated) and "CLOUD_ENDPOINTS" (reserved for future + use). + page_size (int): + A non-negative number that is the maximum + number of results to return. When 0, use default + page size. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return + additional results from the previous method call. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListServicesResponse(proto.Message): + r"""The ``ListServices`` response. + Attributes: + services (Sequence[google.cloud.monitoring_v3.types.Service]): + The ``Service``\ s matching the specified filter. + next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. + """ + + @property + def raw_page(self): + return self + + services = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gm_service.Service, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateServiceRequest(proto.Message): + r"""The ``UpdateService`` request. + Attributes: + service (google.cloud.monitoring_v3.types.Service): + Required. The ``Service`` to draw updates from. The given + ``name`` specifies the resource to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A set of field paths defining which fields to + use for the update. + """ + + service = proto.Field( + proto.MESSAGE, + number=1, + message=gm_service.Service, + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteServiceRequest(proto.Message): + r"""The ``DeleteService`` request. + Attributes: + name (str): + Required. Resource name of the ``Service`` to delete. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateServiceLevelObjectiveRequest(proto.Message): + r"""The ``CreateServiceLevelObjective`` request. + Attributes: + parent (str): + Required. Resource name of the parent ``Service``. 
The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + service_level_objective_id (str): + Optional. The ServiceLevelObjective id to use for this + ServiceLevelObjective. If omitted, an id will be generated + instead. Must match the pattern ``[a-z0-9\-]+`` + service_level_objective (google.cloud.monitoring_v3.types.ServiceLevelObjective): + Required. The ``ServiceLevelObjective`` to create. The + provided ``name`` will be respected if no + ``ServiceLevelObjective`` exists with this name. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + service_level_objective_id = proto.Field( + proto.STRING, + number=3, + ) + service_level_objective = proto.Field( + proto.MESSAGE, + number=2, + message=gm_service.ServiceLevelObjective, + ) + + +class GetServiceLevelObjectiveRequest(proto.Message): + r"""The ``GetServiceLevelObjective`` request. + Attributes: + name (str): + Required. Resource name of the ``ServiceLevelObjective`` to + get. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] + view (google.cloud.monitoring_v3.types.ServiceLevelObjective.View): + View of the ``ServiceLevelObjective`` to return. If + ``DEFAULT``, return the ``ServiceLevelObjective`` as + originally defined. If ``EXPLICIT`` and the + ``ServiceLevelObjective`` is defined in terms of a + ``BasicSli``, replace the ``BasicSli`` with a + ``RequestBasedSli`` spelling out how the SLI is computed. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + view = proto.Field( + proto.ENUM, + number=2, + enum=gm_service.ServiceLevelObjective.View, + ) + + +class ListServiceLevelObjectivesRequest(proto.Message): + r"""The ``ListServiceLevelObjectives`` request. + Attributes: + parent (str): + Required. Resource name of the parent containing the listed + SLOs, either a project or a Monitoring Workspace. The + formats are: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] + workspaces/[HOST_PROJECT_ID_OR_NUMBER]/services/- + filter (str): + A filter specifying what ``ServiceLevelObjective``\ s to + return. + page_size (int): + A non-negative number that is the maximum + number of results to return. When 0, use default + page size. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return + additional results from the previous method call. + view (google.cloud.monitoring_v3.types.ServiceLevelObjective.View): + View of the ``ServiceLevelObjective``\ s to return. If + ``DEFAULT``, return each ``ServiceLevelObjective`` as + originally defined. If ``EXPLICIT`` and the + ``ServiceLevelObjective`` is defined in terms of a + ``BasicSli``, replace the ``BasicSli`` with a + ``RequestBasedSli`` spelling out how the SLI is computed. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + view = proto.Field( + proto.ENUM, + number=5, + enum=gm_service.ServiceLevelObjective.View, + ) + + +class ListServiceLevelObjectivesResponse(proto.Message): + r"""The ``ListServiceLevelObjectives`` response. + Attributes: + service_level_objectives (Sequence[google.cloud.monitoring_v3.types.ServiceLevelObjective]): + The ``ServiceLevelObjective``\ s matching the specified + filter. 
+ next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. + """ + + @property + def raw_page(self): + return self + + service_level_objectives = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gm_service.ServiceLevelObjective, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateServiceLevelObjectiveRequest(proto.Message): + r"""The ``UpdateServiceLevelObjective`` request. + Attributes: + service_level_objective (google.cloud.monitoring_v3.types.ServiceLevelObjective): + Required. The ``ServiceLevelObjective`` to draw updates + from. The given ``name`` specifies the resource to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A set of field paths defining which fields to + use for the update. + """ + + service_level_objective = proto.Field( + proto.MESSAGE, + number=1, + message=gm_service.ServiceLevelObjective, + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteServiceLevelObjectiveRequest(proto.Message): + r"""The ``DeleteServiceLevelObjective`` request. + Attributes: + name (str): + Required. Resource name of the ``ServiceLevelObjective`` to + delete. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/span_context.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/span_context.py new file mode 100644 index 00000000..f7c37ffa --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/span_context.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'SpanContext', + }, +) + + +class SpanContext(proto.Message): + r"""The context of a span. This is attached to an + [Exemplar][google.api.Distribution.Exemplar] in + [Distribution][google.api.Distribution] values during aggregation. + + It contains the name of a span with format: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/traces/[TRACE_ID]/spans/[SPAN_ID] + + Attributes: + span_name (str): + The resource name of the span. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/traces/[TRACE_ID]/spans/[SPAN_ID] + + ``[TRACE_ID]`` is a unique identifier for a trace within a + project; it is a 32-character hexadecimal encoding of a + 16-byte array. + + ``[SPAN_ID]`` is a unique identifier for a span within a + trace; it is a 16-character hexadecimal encoding of an + 8-byte array. 
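+
+            For illustration only (the IDs below are made up), a
+            well-formed value looks like:
+
+            ::
+
+                projects/my-project/traces/069e2148f8f39c9e6a2c51fa5a46b34b/spans/4e6ca22b4e0a9532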
+ """ + + span_name = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime.py new file mode 100644 index 00000000..c6310c72 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime.py @@ -0,0 +1,538 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'UptimeCheckRegion', + 'GroupResourceType', + 'InternalChecker', + 'UptimeCheckConfig', + 'UptimeCheckIp', + }, +) + + +class UptimeCheckRegion(proto.Enum): + r"""The regions from which an Uptime check can be run.""" + REGION_UNSPECIFIED = 0 + USA = 1 + EUROPE = 2 + SOUTH_AMERICA = 3 + ASIA_PACIFIC = 4 + + +class GroupResourceType(proto.Enum): + r"""The supported resource types that can be used as values of + ``group_resource.resource_type``. ``INSTANCE`` includes + ``gce_instance`` and ``aws_ec2_instance`` resource types. The + resource types ``gae_app`` and ``uptime_url`` are not valid here + because group checks on App Engine modules and URLs are not allowed. + """ + RESOURCE_TYPE_UNSPECIFIED = 0 + INSTANCE = 1 + AWS_ELB_LOAD_BALANCER = 2 + + +class InternalChecker(proto.Message): + r"""An internal checker allows Uptime checks to run on + private/internal GCP resources. + + Attributes: + name (str): + A unique resource name for this InternalChecker. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/internalCheckers/[INTERNAL_CHECKER_ID] + + ``[PROJECT_ID_OR_NUMBER]`` is the Stackdriver Workspace + project for the Uptime check config associated with the + internal checker. + display_name (str): + The checker's human-readable name. The + display name should be unique within a + Stackdriver Workspace in order to make it easier + to identify; however, uniqueness is not + enforced. + network (str): + The `GCP VPC + network `__ where the + internal resource lives (ex: "default"). + gcp_zone (str): + The GCP zone the Uptime check should egress from. Only + respected for internal Uptime checks, where internal_network + is specified. + peer_project_id (str): + The GCP project ID where the internal checker + lives. Not necessary the same as the Workspace + project. + state (google.cloud.monitoring_v3.types.InternalChecker.State): + The current operational state of the internal + checker. 
+ """ + class State(proto.Enum): + r"""Operational states for an internal checker.""" + UNSPECIFIED = 0 + CREATING = 1 + RUNNING = 2 + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + network = proto.Field( + proto.STRING, + number=3, + ) + gcp_zone = proto.Field( + proto.STRING, + number=4, + ) + peer_project_id = proto.Field( + proto.STRING, + number=6, + ) + state = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + + +class UptimeCheckConfig(proto.Message): + r"""This message configures which resources and services to + monitor for availability. + + Attributes: + name (str): + A unique resource name for this Uptime check configuration. + The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] + + ``[PROJECT_ID_OR_NUMBER]`` is the Workspace host project + associated with the Uptime check. + + This field should be omitted when creating the Uptime check + configuration; on create, the resource name is assigned by + the server and included in the response. + display_name (str): + A human-friendly name for the Uptime check + configuration. The display name should be unique + within a Stackdriver Workspace in order to make + it easier to identify; however, uniqueness is + not enforced. Required. + monitored_resource (google.api.monitored_resource_pb2.MonitoredResource): + The `monitored + resource `__ + associated with the configuration. The following monitored + resource types are valid for this field: ``uptime_url``, + ``gce_instance``, ``gae_app``, ``aws_ec2_instance``, + ``aws_elb_load_balancer`` ``k8s_service`` + resource_group (google.cloud.monitoring_v3.types.UptimeCheckConfig.ResourceGroup): + The group resource associated with the + configuration. + http_check (google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck): + Contains information needed to make an HTTP + or HTTPS check. + tcp_check (google.cloud.monitoring_v3.types.UptimeCheckConfig.TcpCheck): + Contains information needed to make a TCP + check. + period (google.protobuf.duration_pb2.Duration): + How often, in seconds, the Uptime check is performed. + Currently, the only supported values are ``60s`` (1 minute), + ``300s`` (5 minutes), ``600s`` (10 minutes), and ``900s`` + (15 minutes). Optional, defaults to ``60s``. + timeout (google.protobuf.duration_pb2.Duration): + The maximum amount of time to wait for the + request to complete (must be between 1 and 60 + seconds). Required. + content_matchers (Sequence[google.cloud.monitoring_v3.types.UptimeCheckConfig.ContentMatcher]): + The content that is expected to appear in the data returned + by the target server against which the check is run. + Currently, only the first entry in the ``content_matchers`` + list is supported, and additional entries will be ignored. + This field is optional and should only be specified if a + content match is required as part of the/ Uptime check. + selected_regions (Sequence[google.cloud.monitoring_v3.types.UptimeCheckRegion]): + The list of regions from which the check will + be run. Some regions contain one location, and + others contain more than one. If this field is + specified, enough regions must be provided to + include a minimum of 3 locations. Not + specifying this field will result in Uptime + checks running from all available regions. + is_internal (bool): + If this is ``true``, then checks are made only from the + 'internal_checkers'. If it is ``false``, then checks are + made only from the 'selected_regions'. 
It is an error to + provide 'selected_regions' when is_internal is ``true``, or + to provide 'internal_checkers' when is_internal is + ``false``. + internal_checkers (Sequence[google.cloud.monitoring_v3.types.InternalChecker]): + The internal checkers that this check will egress from. If + ``is_internal`` is ``true`` and this list is empty, the + check will egress from all the InternalCheckers configured + for the project that owns this ``UptimeCheckConfig``. + """ + + class ResourceGroup(proto.Message): + r"""The resource submessage for group checks. It can be used + instead of a monitored resource, when multiple resources are + being monitored. + + Attributes: + group_id (str): + The group of resources being monitored. Should be only the + ``[GROUP_ID]``, and not the full-path + ``projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID]``. + resource_type (google.cloud.monitoring_v3.types.GroupResourceType): + The resource type of the group members. + """ + + group_id = proto.Field( + proto.STRING, + number=1, + ) + resource_type = proto.Field( + proto.ENUM, + number=2, + enum='GroupResourceType', + ) + + class HttpCheck(proto.Message): + r"""Information involved in an HTTP/HTTPS Uptime check request. + Attributes: + request_method (google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck.RequestMethod): + The HTTP request method to use for the check. If set to + ``METHOD_UNSPECIFIED`` then ``request_method`` defaults to + ``GET``. + use_ssl (bool): + If ``true``, use HTTPS instead of HTTP to run the check. + path (str): + Optional (defaults to "/"). The path to the page against + which to run the check. Will be combined with the ``host`` + (specified within the ``monitored_resource``) and ``port`` + to construct the full URL. If the provided path does not + begin with "/", a "/" will be prepended automatically. + port (int): + Optional (defaults to 80 when ``use_ssl`` is ``false``, and + 443 when ``use_ssl`` is ``true``). The TCP port on the HTTP + server against which to run the check. Will be combined with + host (specified within the ``monitored_resource``) and + ``path`` to construct the full URL. + auth_info (google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck.BasicAuthentication): + The authentication information. Optional when + creating an HTTP check; defaults to empty. + mask_headers (bool): + Boolean specifying whether to encrypt the header + information. Encryption should be specified for any headers + related to authentication that you do not wish to be seen + when retrieving the configuration. The server will be + responsible for encrypting the headers. On Get/List calls, + if ``mask_headers`` is set to ``true`` then the headers will + be obscured with ``******.`` + headers (Sequence[google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck.HeadersEntry]): + The list of headers to send as part of the + Uptime check request. If two headers have the + same key and different values, they should be + entered as a single header, with the value being + a comma-separated list of all the desired values + as described at + https://www.w3.org/Protocols/rfc2616/rfc2616.txt + (page 31). Entering two separate headers with + the same key in a Create call will cause the + first to be overwritten by the second. The + maximum number of headers allowed is 100. + content_type (google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck.ContentType): + The content type header to use for the check. The following + configurations result in errors: + + 1. 
Content type is specified in both the ``headers`` field + and the ``content_type`` field. + 2. Request method is ``GET`` and ``content_type`` is not + ``TYPE_UNSPECIFIED`` + 3. Request method is ``POST`` and ``content_type`` is + ``TYPE_UNSPECIFIED``. + 4. Request method is ``POST`` and a "Content-Type" header is + provided via ``headers`` field. The ``content_type`` + field should be used instead. + validate_ssl (bool): + Boolean specifying whether to include SSL certificate + validation as a part of the Uptime check. Only applies to + checks where ``monitored_resource`` is set to + ``uptime_url``. If ``use_ssl`` is ``false``, setting + ``validate_ssl`` to ``true`` has no effect. + body (bytes): + The request body associated with the HTTP POST request. If + ``content_type`` is ``URL_ENCODED``, the body passed in must + be URL-encoded. Users can provide a ``Content-Length`` + header via the ``headers`` field or the API will do so. If + the ``request_method`` is ``GET`` and ``body`` is not empty, + the API will return an error. The maximum byte size is 1 + megabyte. Note: As with all ``bytes`` fields, JSON + representations are base64 encoded. e.g.: "foo=bar" in + URL-encoded form is "foo%3Dbar" and in base64 encoding is + "Zm9vJTI1M0RiYXI=". + """ + class RequestMethod(proto.Enum): + r"""The HTTP request method options.""" + METHOD_UNSPECIFIED = 0 + GET = 1 + POST = 2 + + class ContentType(proto.Enum): + r"""Header options corresponding to the content type of a HTTP + request body. + """ + TYPE_UNSPECIFIED = 0 + URL_ENCODED = 1 + + class BasicAuthentication(proto.Message): + r"""The authentication parameters to provide to the specified resource + or URL that requires a username and password. Currently, only `Basic + HTTP authentication `__ is + supported in Uptime checks. + + Attributes: + username (str): + The username to use when authenticating with + the HTTP server. + password (str): + The password to use when authenticating with + the HTTP server. + """ + + username = proto.Field( + proto.STRING, + number=1, + ) + password = proto.Field( + proto.STRING, + number=2, + ) + + request_method = proto.Field( + proto.ENUM, + number=8, + enum='UptimeCheckConfig.HttpCheck.RequestMethod', + ) + use_ssl = proto.Field( + proto.BOOL, + number=1, + ) + path = proto.Field( + proto.STRING, + number=2, + ) + port = proto.Field( + proto.INT32, + number=3, + ) + auth_info = proto.Field( + proto.MESSAGE, + number=4, + message='UptimeCheckConfig.HttpCheck.BasicAuthentication', + ) + mask_headers = proto.Field( + proto.BOOL, + number=5, + ) + headers = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + content_type = proto.Field( + proto.ENUM, + number=9, + enum='UptimeCheckConfig.HttpCheck.ContentType', + ) + validate_ssl = proto.Field( + proto.BOOL, + number=7, + ) + body = proto.Field( + proto.BYTES, + number=10, + ) + + class TcpCheck(proto.Message): + r"""Information required for a TCP Uptime check request. + Attributes: + port (int): + The TCP port on the server against which to run the check. + Will be combined with host (specified within the + ``monitored_resource``) to construct the full URL. Required. + """ + + port = proto.Field( + proto.INT32, + number=1, + ) + + class ContentMatcher(proto.Message): + r"""Optional. Used to perform content matching. This allows + matching based on substrings and regular expressions, together + with their negations. 
Only the first 4 MB of an HTTP or + HTTPS check's response (and the first 1 MB of a TCP check's + response) are examined for purposes of content matching. + + Attributes: + content (str): + String or regex content to match. Maximum 1024 bytes. An + empty ``content`` string indicates no content matching is to + be performed. + matcher (google.cloud.monitoring_v3.types.UptimeCheckConfig.ContentMatcher.ContentMatcherOption): + The type of content matcher that will be applied to the + server output, compared to the ``content`` string when the + check is run. + """ + class ContentMatcherOption(proto.Enum): + r"""Options to perform content matching.""" + CONTENT_MATCHER_OPTION_UNSPECIFIED = 0 + CONTAINS_STRING = 1 + NOT_CONTAINS_STRING = 2 + MATCHES_REGEX = 3 + NOT_MATCHES_REGEX = 4 + + content = proto.Field( + proto.STRING, + number=1, + ) + matcher = proto.Field( + proto.ENUM, + number=2, + enum='UptimeCheckConfig.ContentMatcher.ContentMatcherOption', + ) + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + monitored_resource = proto.Field( + proto.MESSAGE, + number=3, + oneof='resource', + message=monitored_resource_pb2.MonitoredResource, + ) + resource_group = proto.Field( + proto.MESSAGE, + number=4, + oneof='resource', + message=ResourceGroup, + ) + http_check = proto.Field( + proto.MESSAGE, + number=5, + oneof='check_request_type', + message=HttpCheck, + ) + tcp_check = proto.Field( + proto.MESSAGE, + number=6, + oneof='check_request_type', + message=TcpCheck, + ) + period = proto.Field( + proto.MESSAGE, + number=7, + message=duration_pb2.Duration, + ) + timeout = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + content_matchers = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=ContentMatcher, + ) + selected_regions = proto.RepeatedField( + proto.ENUM, + number=10, + enum='UptimeCheckRegion', + ) + is_internal = proto.Field( + proto.BOOL, + number=15, + ) + internal_checkers = proto.RepeatedField( + proto.MESSAGE, + number=14, + message='InternalChecker', + ) + + +class UptimeCheckIp(proto.Message): + r"""Contains the region, location, and list of IP + addresses where checkers in the location run from. + + Attributes: + region (google.cloud.monitoring_v3.types.UptimeCheckRegion): + A broad region category in which the IP + address is located. + location (str): + A more specific location within the region + that typically encodes a particular + city/town/metro (and its containing + state/province or country) within the broader + umbrella region category. + ip_address (str): + The IP address from which the Uptime check + originates. This is a fully specified IP address + (not an IP address range). Most IP addresses, as + of this publication, are in IPv4 format; + however, one should not rely on the IP addresses + being in IPv4 format indefinitely, and should + support interpreting this field in either IPv4 + or IPv6 format. 
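+
+    A minimal consumption sketch (assuming default credentials and the
+    ``UptimeCheckServiceClient`` generated in this package), e.g. to
+    allowlist checker addresses:
+
+    ::
+
+        from google.cloud import monitoring_v3
+
+        client = monitoring_v3.UptimeCheckServiceClient()
+        for entry in client.list_uptime_check_ips(request={}):
+            print(entry.region, entry.location, entry.ip_address)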
+ """ + + region = proto.Field( + proto.ENUM, + number=1, + enum='UptimeCheckRegion', + ) + location = proto.Field( + proto.STRING, + number=2, + ) + ip_address = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime_service.py new file mode 100644 index 00000000..2a1fbf90 --- /dev/null +++ b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime_service.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.monitoring_v3.types import uptime +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.monitoring.v3', + manifest={ + 'ListUptimeCheckConfigsRequest', + 'ListUptimeCheckConfigsResponse', + 'GetUptimeCheckConfigRequest', + 'CreateUptimeCheckConfigRequest', + 'UpdateUptimeCheckConfigRequest', + 'DeleteUptimeCheckConfigRequest', + 'ListUptimeCheckIpsRequest', + 'ListUptimeCheckIpsResponse', + }, +) + + +class ListUptimeCheckConfigsRequest(proto.Message): + r"""The protocol for the ``ListUptimeCheckConfigs`` request. + Attributes: + parent (str): + Required. The + `project `__ + whose Uptime check configurations are listed. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + page_size (int): + The maximum number of results to return in a single + response. The server may further constrain the maximum + number of results returned in a single page. If the + page_size is <=0, the server will decide the number of + results to be returned. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return more + results from the previous method call. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListUptimeCheckConfigsResponse(proto.Message): + r"""The protocol for the ``ListUptimeCheckConfigs`` response. + Attributes: + uptime_check_configs (Sequence[google.cloud.monitoring_v3.types.UptimeCheckConfig]): + The returned Uptime check configurations. + next_page_token (str): + This field represents the pagination token to retrieve the + next page of results. If the value is empty, it means no + further results for the request. To retrieve the next page + of results, the value of the next_page_token is passed to + the subsequent List method call (in the request message's + page_token field). + total_size (int): + The total number of Uptime check + configurations for the project, irrespective of + any pagination. 
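+
+    When called through the generated ``UptimeCheckServiceClient``, the
+    returned pager follows ``next_page_token`` automatically; iterate
+    ``pages`` instead to read per-page fields such as ``total_size``
+    (sketch; client and method names are assumed from this package):
+
+    ::
+
+        from google.cloud import monitoring_v3
+
+        client = monitoring_v3.UptimeCheckServiceClient()
+        pager = client.list_uptime_check_configs(
+            request={"parent": "projects/[PROJECT_ID_OR_NUMBER]"})
+        for page in pager.pages:
+            print(page.total_size, len(page.uptime_check_configs))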
+ """ + + @property + def raw_page(self): + return self + + uptime_check_configs = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=uptime.UptimeCheckConfig, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + total_size = proto.Field( + proto.INT32, + number=3, + ) + + +class GetUptimeCheckConfigRequest(proto.Message): + r"""The protocol for the ``GetUptimeCheckConfig`` request. + Attributes: + name (str): + Required. The Uptime check configuration to retrieve. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateUptimeCheckConfigRequest(proto.Message): + r"""The protocol for the ``CreateUptimeCheckConfig`` request. + Attributes: + parent (str): + Required. The + `project `__ + in which to create the Uptime check. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + uptime_check_config (google.cloud.monitoring_v3.types.UptimeCheckConfig): + Required. The new Uptime check configuration. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + uptime_check_config = proto.Field( + proto.MESSAGE, + number=2, + message=uptime.UptimeCheckConfig, + ) + + +class UpdateUptimeCheckConfigRequest(proto.Message): + r"""The protocol for the ``UpdateUptimeCheckConfig`` request. + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. If present, only the listed fields + in the current Uptime check configuration are + updated with values from the new configuration. + If this field is empty, then the current + configuration is completely replaced with the + new configuration. + uptime_check_config (google.cloud.monitoring_v3.types.UptimeCheckConfig): + Required. If an ``updateMask`` has been specified, this + field gives the values for the set of fields mentioned in + the ``updateMask``. If an ``updateMask`` has not been given, + this Uptime check configuration replaces the current + configuration. If a field is mentioned in ``updateMask`` but + the corresonding field is omitted in this partial Uptime + check configuration, it has the effect of deleting/clearing + the field from the configuration on the server. + + The following fields can be updated: ``display_name``, + ``http_check``, ``tcp_check``, ``timeout``, + ``content_matchers``, and ``selected_regions``. + """ + + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + uptime_check_config = proto.Field( + proto.MESSAGE, + number=3, + message=uptime.UptimeCheckConfig, + ) + + +class DeleteUptimeCheckConfigRequest(proto.Message): + r"""The protocol for the ``DeleteUptimeCheckConfig`` request. + Attributes: + name (str): + Required. The Uptime check configuration to delete. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListUptimeCheckIpsRequest(proto.Message): + r"""The protocol for the ``ListUptimeCheckIps`` request. + Attributes: + page_size (int): + The maximum number of results to return in a single + response. The server may further constrain the maximum + number of results returned in a single page. If the + page_size is <=0, the server will decide the number of + results to be returned. NOTE: this field is not yet + implemented + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. 
Using this field causes the method to return more + results from the previous method call. NOTE: this field is + not yet implemented + """ + + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class ListUptimeCheckIpsResponse(proto.Message): + r"""The protocol for the ``ListUptimeCheckIps`` response. + Attributes: + uptime_check_ips (Sequence[google.cloud.monitoring_v3.types.UptimeCheckIp]): + The returned list of IP addresses (including + region and location) that the checkers run from. + next_page_token (str): + This field represents the pagination token to retrieve the + next page of results. If the value is empty, it means no + further results for the request. To retrieve the next page + of results, the value of the next_page_token is passed to + the subsequent List method call (in the request message's + page_token field). NOTE: this field is not yet implemented + """ + + @property + def raw_page(self): + return self + + uptime_check_ips = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=uptime.UptimeCheckIp, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/mypy.ini b/owl-bot-staging/v3/mypy.ini new file mode 100644 index 00000000..4505b485 --- /dev/null +++ b/owl-bot-staging/v3/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/owl-bot-staging/v3/noxfile.py b/owl-bot-staging/v3/noxfile.py new file mode 100644 index 00000000..8fd2eb88 --- /dev/null +++ b/owl-bot-staging/v3/noxfile.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", +] + +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/monitoring_v3/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python='3.7') +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
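+
+    A typical local invocation (assuming ``nox`` is installed; the session
+    names are defined in this noxfile) might be ``nox -s unit-3.9 cover``.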
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy', 'types-pkg_resources') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python='3.6') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/owl-bot-staging/v3/scripts/fixup_monitoring_v3_keywords.py b/owl-bot-staging/v3/scripts/fixup_monitoring_v3_keywords.py new file mode 100644 index 00000000..e2608d8a --- /dev/null +++ b/owl-bot-staging/v3/scripts/fixup_monitoring_v3_keywords.py @@ -0,0 +1,221 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class monitoringCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_alert_policy': ('name', 'alert_policy', ), + 'create_group': ('name', 'group', 'validate_only', ), + 'create_metric_descriptor': ('name', 'metric_descriptor', ), + 'create_notification_channel': ('name', 'notification_channel', ), + 'create_service': ('parent', 'service', 'service_id', ), + 'create_service_level_objective': ('parent', 'service_level_objective', 'service_level_objective_id', ), + 'create_time_series': ('name', 'time_series', ), + 'create_uptime_check_config': ('parent', 'uptime_check_config', ), + 'delete_alert_policy': ('name', ), + 'delete_group': ('name', 'recursive', ), + 'delete_metric_descriptor': ('name', ), + 'delete_notification_channel': ('name', 'force', ), + 'delete_service': ('name', ), + 'delete_service_level_objective': ('name', ), + 'delete_uptime_check_config': ('name', ), + 'get_alert_policy': ('name', ), + 'get_group': ('name', ), + 'get_metric_descriptor': ('name', ), + 'get_monitored_resource_descriptor': ('name', ), + 'get_notification_channel': ('name', ), + 'get_notification_channel_descriptor': ('name', ), + 'get_notification_channel_verification_code': ('name', 'expire_time', ), + 'get_service': ('name', ), + 'get_service_level_objective': ('name', 'view', ), + 'get_uptime_check_config': ('name', ), + 'list_alert_policies': ('name', 'filter', 'order_by', 'page_size', 'page_token', ), + 'list_group_members': ('name', 'page_size', 'page_token', 'filter', 'interval', ), + 'list_groups': ('name', 'children_of_group', 'ancestors_of_group', 'descendants_of_group', 'page_size', 'page_token', ), + 'list_metric_descriptors': ('name', 'filter', 'page_size', 'page_token', ), + 'list_monitored_resource_descriptors': ('name', 'filter', 'page_size', 'page_token', ), + 'list_notification_channel_descriptors': ('name', 'page_size', 'page_token', ), + 'list_notification_channels': ('name', 'filter', 'order_by', 'page_size', 'page_token', ), + 'list_service_level_objectives': ('parent', 'filter', 'page_size', 'page_token', 'view', ), + 'list_services': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_time_series': ('name', 'filter', 'interval', 'view', 'aggregation', 'secondary_aggregation', 'order_by', 'page_size', 'page_token', ), + 'list_uptime_check_configs': ('parent', 'page_size', 'page_token', ), + 'list_uptime_check_ips': ('page_size', 'page_token', ), + 'query_time_series': ('name', 'query', 'page_size', 'page_token', ), + 'send_notification_channel_verification_code': ('name', ), + 'update_alert_policy': ('alert_policy', 'update_mask', ), + 'update_group': ('group', 'validate_only', ), + 'update_notification_channel': ('notification_channel', 'update_mask', ), + 'update_service': ('service', 'update_mask', ), + 'update_service_level_objective': ('service_level_objective', 'update_mask', ), + 'update_uptime_check_config': ('uptime_check_config', 'update_mask', ), + 'verify_notification_channel': ('name', 'code', ), + } + + def 
leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=monitoringCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the monitoring client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
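+
+Example invocation (the directory names are illustrative only; per the checks
+below, the output directory must already exist and be empty):
+
+    python3 scripts/fixup_monitoring_v3_keywords.py --input-directory old_samples/ --output-directory fixed_samples/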
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v3/setup.py b/owl-bot-staging/v3/setup.py new file mode 100644 index 00000000..375a4601 --- /dev/null +++ b/owl-bot-staging/v3/setup.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-monitoring', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/owl-bot-staging/v3/tests/__init__.py b/owl-bot-staging/v3/tests/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v3/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v3/tests/unit/__init__.py b/owl-bot-staging/v3/tests/unit/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v3/tests/unit/gapic/__init__.py b/owl-bot-staging/v3/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/__init__.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py new file mode 100644 index 00000000..d1966f81 --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py @@ -0,0 +1,2365 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.monitoring_v3.services.alert_policy_service import AlertPolicyServiceAsyncClient +from google.cloud.monitoring_v3.services.alert_policy_service import AlertPolicyServiceClient +from google.cloud.monitoring_v3.services.alert_policy_service import pagers +from google.cloud.monitoring_v3.services.alert_policy_service import transports +from google.cloud.monitoring_v3.services.alert_policy_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.monitoring_v3.types import alert +from google.cloud.monitoring_v3.types import alert_service +from google.cloud.monitoring_v3.types import common +from google.cloud.monitoring_v3.types import mutation_record +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AlertPolicyServiceClient._get_default_mtls_endpoint(None) is None + assert AlertPolicyServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert AlertPolicyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert AlertPolicyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert AlertPolicyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert AlertPolicyServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + AlertPolicyServiceClient, + AlertPolicyServiceAsyncClient, +]) +def test_alert_policy_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.AlertPolicyServiceGrpcTransport, "grpc"), + (transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_alert_policy_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + AlertPolicyServiceClient, + AlertPolicyServiceAsyncClient, +]) +def test_alert_policy_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_alert_policy_service_client_get_transport_class(): + transport = AlertPolicyServiceClient.get_transport_class() + available_transports = [ + transports.AlertPolicyServiceGrpcTransport, + ] + assert transport in available_transports + + transport = 
AlertPolicyServiceClient.get_transport_class("grpc") + assert transport == transports.AlertPolicyServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc"), + (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(AlertPolicyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlertPolicyServiceClient)) +@mock.patch.object(AlertPolicyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlertPolicyServiceAsyncClient)) +def test_alert_policy_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AlertPolicyServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AlertPolicyServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc", "true"), + (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc", "false"), + (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(AlertPolicyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlertPolicyServiceClient)) +@mock.patch.object(AlertPolicyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlertPolicyServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_alert_policy_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc"), + (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_alert_policy_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc"), + (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_alert_policy_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
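+ # (The credentials file is never opened here; the test only verifies that the path is forwarded to the transport untouched as credentials_file.)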
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_alert_policy_service_client_client_options_from_dict(): + with mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = AlertPolicyServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_alert_policies(transport: str = 'grpc', request_type=alert_service.ListAlertPoliciesRequest): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert_service.ListAlertPoliciesResponse( + next_page_token='next_page_token_value', + total_size=1086, + ) + response = client.list_alert_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.ListAlertPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAlertPoliciesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.total_size == 1086 + + +def test_list_alert_policies_from_dict(): + test_list_alert_policies(request_type=dict) + + +def test_list_alert_policies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__') as call: + client.list_alert_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.ListAlertPoliciesRequest() + + +@pytest.mark.asyncio +async def test_list_alert_policies_async(transport: str = 'grpc_asyncio', request_type=alert_service.ListAlertPoliciesRequest): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(alert_service.ListAlertPoliciesResponse( + next_page_token='next_page_token_value', + total_size=1086, + )) + response = await client.list_alert_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.ListAlertPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAlertPoliciesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_alert_policies_async_from_dict(): + await test_list_alert_policies_async(request_type=dict) + + +def test_list_alert_policies_field_headers(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.ListAlertPoliciesRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__') as call: + call.return_value = alert_service.ListAlertPoliciesResponse() + client.list_alert_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_alert_policies_field_headers_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.ListAlertPoliciesRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert_service.ListAlertPoliciesResponse()) + await client.list_alert_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_alert_policies_flattened(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert_service.ListAlertPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
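+ # (The flattened name keyword should be copied onto the generated request object; the assertions that follow check exactly that.)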
+ client.list_alert_policies( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_list_alert_policies_flattened_error(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_alert_policies( + alert_service.ListAlertPoliciesRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_list_alert_policies_flattened_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert_service.ListAlertPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert_service.ListAlertPoliciesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_alert_policies( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_list_alert_policies_flattened_error_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_alert_policies( + alert_service.ListAlertPoliciesRequest(), + name='name_value', + ) + + +def test_list_alert_policies_pager(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + alert.AlertPolicy(), + alert.AlertPolicy(), + ], + next_page_token='abc', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[], + next_page_token='def', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + ], + next_page_token='ghi', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + alert.AlertPolicy(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.list_alert_policies(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, alert.AlertPolicy) + for i in results) + +def test_list_alert_policies_pages(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + alert.AlertPolicy(), + alert.AlertPolicy(), + ], + next_page_token='abc', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[], + next_page_token='def', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + ], + next_page_token='ghi', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + alert.AlertPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_alert_policies(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_alert_policies_async_pager(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + alert.AlertPolicy(), + alert.AlertPolicy(), + ], + next_page_token='abc', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[], + next_page_token='def', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + ], + next_page_token='ghi', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + alert.AlertPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_alert_policies(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, alert.AlertPolicy) + for i in responses) + +@pytest.mark.asyncio +async def test_list_alert_policies_async_pages(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_alert_policies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
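+ # (With side_effect, each successive stub call returns the next response in the sequence, one page per RPC; the trailing RuntimeError is a guard in case the pager requests more pages than were provided.)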
+ call.side_effect = ( + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + alert.AlertPolicy(), + alert.AlertPolicy(), + ], + next_page_token='abc', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[], + next_page_token='def', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + ], + next_page_token='ghi', + ), + alert_service.ListAlertPoliciesResponse( + alert_policies=[ + alert.AlertPolicy(), + alert.AlertPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_alert_policies(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_alert_policy(transport: str = 'grpc', request_type=alert_service.GetAlertPolicyRequest): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert.AlertPolicy( + name='name_value', + display_name='display_name_value', + combiner=alert.AlertPolicy.ConditionCombinerType.AND, + notification_channels=['notification_channels_value'], + ) + response = client.get_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.GetAlertPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, alert.AlertPolicy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND + assert response.notification_channels == ['notification_channels_value'] + + +def test_get_alert_policy_from_dict(): + test_get_alert_policy(request_type=dict) + + +def test_get_alert_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_alert_policy), + '__call__') as call: + client.get_alert_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.GetAlertPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_alert_policy_async(transport: str = 'grpc_asyncio', request_type=alert_service.GetAlertPolicyRequest): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. 
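+ # (The async client awaits the stub, so the canned response is wrapped in grpc_helpers_async.FakeUnaryUnaryCall to make it awaitable.)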
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy( + name='name_value', + display_name='display_name_value', + combiner=alert.AlertPolicy.ConditionCombinerType.AND, + notification_channels=['notification_channels_value'], + )) + response = await client.get_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.GetAlertPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, alert.AlertPolicy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND + assert response.notification_channels == ['notification_channels_value'] + + +@pytest.mark.asyncio +async def test_get_alert_policy_async_from_dict(): + await test_get_alert_policy_async(request_type=dict) + + +def test_get_alert_policy_field_headers(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.GetAlertPolicyRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_alert_policy), + '__call__') as call: + call.return_value = alert.AlertPolicy() + client.get_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_alert_policy_field_headers_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.GetAlertPolicyRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_alert_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) + await client.get_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_alert_policy_flattened(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert.AlertPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_alert_policy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_alert_policy_flattened_error(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_alert_policy( + alert_service.GetAlertPolicyRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_alert_policy_flattened_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert.AlertPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_alert_policy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_alert_policy_flattened_error_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_alert_policy( + alert_service.GetAlertPolicyRequest(), + name='name_value', + ) + + +def test_create_alert_policy(transport: str = 'grpc', request_type=alert_service.CreateAlertPolicyRequest): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert.AlertPolicy( + name='name_value', + display_name='display_name_value', + combiner=alert.AlertPolicy.ConditionCombinerType.AND, + notification_channels=['notification_channels_value'], + ) + response = client.create_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.CreateAlertPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, alert.AlertPolicy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND + assert response.notification_channels == ['notification_channels_value'] + + +def test_create_alert_policy_from_dict(): + test_create_alert_policy(request_type=dict) + + +def test_create_alert_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
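+ # (An argument-less call should behave like sending a default-constructed CreateAlertPolicyRequest, which is what the assertion below verifies.)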
+ client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_alert_policy), + '__call__') as call: + client.create_alert_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.CreateAlertPolicyRequest() + + +@pytest.mark.asyncio +async def test_create_alert_policy_async(transport: str = 'grpc_asyncio', request_type=alert_service.CreateAlertPolicyRequest): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy( + name='name_value', + display_name='display_name_value', + combiner=alert.AlertPolicy.ConditionCombinerType.AND, + notification_channels=['notification_channels_value'], + )) + response = await client.create_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.CreateAlertPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, alert.AlertPolicy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND + assert response.notification_channels == ['notification_channels_value'] + + +@pytest.mark.asyncio +async def test_create_alert_policy_async_from_dict(): + await test_create_alert_policy_async(request_type=dict) + + +def test_create_alert_policy_field_headers(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.CreateAlertPolicyRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_alert_policy), + '__call__') as call: + call.return_value = alert.AlertPolicy() + client.create_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_alert_policy_field_headers_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.CreateAlertPolicyRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_alert_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) + await client.create_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_alert_policy_flattened(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert.AlertPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_alert_policy( + name='name_value', + alert_policy=alert.AlertPolicy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].alert_policy == alert.AlertPolicy(name='name_value') + + +def test_create_alert_policy_flattened_error(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_alert_policy( + alert_service.CreateAlertPolicyRequest(), + name='name_value', + alert_policy=alert.AlertPolicy(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_alert_policy_flattened_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert.AlertPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_alert_policy( + name='name_value', + alert_policy=alert.AlertPolicy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].alert_policy == alert.AlertPolicy(name='name_value') + + +@pytest.mark.asyncio +async def test_create_alert_policy_flattened_error_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_alert_policy( + alert_service.CreateAlertPolicyRequest(), + name='name_value', + alert_policy=alert.AlertPolicy(name='name_value'), + ) + + +def test_delete_alert_policy(transport: str = 'grpc', request_type=alert_service.DeleteAlertPolicyRequest): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.DeleteAlertPolicyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_alert_policy_from_dict(): + test_delete_alert_policy(request_type=dict) + + +def test_delete_alert_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_alert_policy), + '__call__') as call: + client.delete_alert_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.DeleteAlertPolicyRequest() + + +@pytest.mark.asyncio +async def test_delete_alert_policy_async(transport: str = 'grpc_asyncio', request_type=alert_service.DeleteAlertPolicyRequest): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.DeleteAlertPolicyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_alert_policy_async_from_dict(): + await test_delete_alert_policy_async(request_type=dict) + + +def test_delete_alert_policy_field_headers(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.DeleteAlertPolicyRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_alert_policy), + '__call__') as call: + call.return_value = None + client.delete_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_alert_policy_field_headers_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.DeleteAlertPolicyRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_alert_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_alert_policy_flattened(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_alert_policy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_alert_policy_flattened_error(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_alert_policy( + alert_service.DeleteAlertPolicyRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_alert_policy_flattened_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_alert_policy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_alert_policy_flattened_error_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_alert_policy( + alert_service.DeleteAlertPolicyRequest(), + name='name_value', + ) + + +def test_update_alert_policy(transport: str = 'grpc', request_type=alert_service.UpdateAlertPolicyRequest): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert.AlertPolicy( + name='name_value', + display_name='display_name_value', + combiner=alert.AlertPolicy.ConditionCombinerType.AND, + notification_channels=['notification_channels_value'], + ) + response = client.update_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.UpdateAlertPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, alert.AlertPolicy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND + assert response.notification_channels == ['notification_channels_value'] + + +def test_update_alert_policy_from_dict(): + test_update_alert_policy(request_type=dict) + + +def test_update_alert_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_policy), + '__call__') as call: + client.update_alert_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.UpdateAlertPolicyRequest() + + +@pytest.mark.asyncio +async def test_update_alert_policy_async(transport: str = 'grpc_asyncio', request_type=alert_service.UpdateAlertPolicyRequest): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy( + name='name_value', + display_name='display_name_value', + combiner=alert.AlertPolicy.ConditionCombinerType.AND, + notification_channels=['notification_channels_value'], + )) + response = await client.update_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == alert_service.UpdateAlertPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, alert.AlertPolicy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND + assert response.notification_channels == ['notification_channels_value'] + + +@pytest.mark.asyncio +async def test_update_alert_policy_async_from_dict(): + await test_update_alert_policy_async(request_type=dict) + + +def test_update_alert_policy_field_headers(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.UpdateAlertPolicyRequest() + + request.alert_policy.name = 'alert_policy.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_policy), + '__call__') as call: + call.return_value = alert.AlertPolicy() + client.update_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'alert_policy.name=alert_policy.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_alert_policy_field_headers_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = alert_service.UpdateAlertPolicyRequest() + + request.alert_policy.name = 'alert_policy.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) + await client.update_alert_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'alert_policy.name=alert_policy.name/value', + ) in kw['metadata'] + + +def test_update_alert_policy_flattened(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert.AlertPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_alert_policy( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + alert_policy=alert.AlertPolicy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + assert args[0].alert_policy == alert.AlertPolicy(name='name_value') + + +def test_update_alert_policy_flattened_error(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_alert_policy( + alert_service.UpdateAlertPolicyRequest(), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + alert_policy=alert.AlertPolicy(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_update_alert_policy_flattened_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_alert_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = alert.AlertPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_alert_policy( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + alert_policy=alert.AlertPolicy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + assert args[0].alert_policy == alert.AlertPolicy(name='name_value') + + +@pytest.mark.asyncio +async def test_update_alert_policy_flattened_error_async(): + client = AlertPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_alert_policy( + alert_service.UpdateAlertPolicyRequest(), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + alert_policy=alert.AlertPolicy(name='name_value'), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AlertPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AlertPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlertPolicyServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. 
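+ # (As with credentials and credentials_file above, per-client options cannot be combined with an already-configured transport instance.)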
+ transport = transports.AlertPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlertPolicyServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AlertPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AlertPolicyServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AlertPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AlertPolicyServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.AlertPolicyServiceGrpcTransport, + transports.AlertPolicyServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AlertPolicyServiceGrpcTransport, + ) + +def test_alert_policy_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AlertPolicyServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_alert_policy_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.AlertPolicyServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
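+ # (The base transport only defines the interface; the concrete gRPC transports are expected to supply the real implementations.)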
+ methods = ( + 'list_alert_policies', + 'get_alert_policy', + 'create_alert_policy', + 'delete_alert_policy', + 'update_alert_policy', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_alert_policy_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlertPolicyServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_alert_policy_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlertPolicyServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ), + quota_project_id="octopus", + ) + + +def test_alert_policy_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlertPolicyServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_alert_policy_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AlertPolicyServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_alert_policy_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
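+ # (google-auth releases before 1.25.0 have no default_scopes parameter, so the service scopes are passed directly via scopes, as asserted below.)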
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AlertPolicyServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlertPolicyServiceGrpcTransport, + transports.AlertPolicyServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_alert_policy_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlertPolicyServiceGrpcTransport, + transports.AlertPolicyServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_alert_policy_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AlertPolicyServiceGrpcTransport, grpc_helpers), + (transports.AlertPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_alert_policy_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
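+ # (Both google.auth.default and grpc_helpers.create_channel are mocked below so the test can inspect exactly which arguments are used to build the channel.)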
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "monitoring.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + scopes=["1", "2"], + default_host="monitoring.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.AlertPolicyServiceGrpcTransport, transports.AlertPolicyServiceGrpcAsyncIOTransport]) +def test_alert_policy_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_alert_policy_service_host_no_port(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), + ) + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_alert_policy_service_host_with_port(): + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), + ) + assert client.transport._host == 'monitoring.googleapis.com:8000' + +def test_alert_policy_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AlertPolicyServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_alert_policy_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.AlertPolicyServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.AlertPolicyServiceGrpcTransport, transports.AlertPolicyServiceGrpcAsyncIOTransport])
+def test_alert_policy_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.AlertPolicyServiceGrpcTransport, transports.AlertPolicyServiceGrpcAsyncIOTransport]) +def test_alert_policy_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_alert_policy_path(): + project = "squid" + alert_policy = "clam" + expected = "projects/{project}/alertPolicies/{alert_policy}".format(project=project, alert_policy=alert_policy, ) + actual = AlertPolicyServiceClient.alert_policy_path(project, alert_policy) + assert expected == actual + + +def test_parse_alert_policy_path(): + expected = { + "project": "whelk", + "alert_policy": "octopus", + } + path = AlertPolicyServiceClient.alert_policy_path(**expected) + + # Check that the path construction is reversible. + actual = AlertPolicyServiceClient.parse_alert_policy_path(path) + assert expected == actual + +def test_alert_policy_condition_path(): + project = "oyster" + alert_policy = "nudibranch" + condition = "cuttlefish" + expected = "projects/{project}/alertPolicies/{alert_policy}/conditions/{condition}".format(project=project, alert_policy=alert_policy, condition=condition, ) + actual = AlertPolicyServiceClient.alert_policy_condition_path(project, alert_policy, condition) + assert expected == actual + + +def test_parse_alert_policy_condition_path(): + expected = { + "project": "mussel", + "alert_policy": "winkle", + "condition": "nautilus", + } + path = AlertPolicyServiceClient.alert_policy_condition_path(**expected) + + # Check that the path construction is reversible. + actual = AlertPolicyServiceClient.parse_alert_policy_condition_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = AlertPolicyServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = AlertPolicyServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlertPolicyServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = AlertPolicyServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = AlertPolicyServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AlertPolicyServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = AlertPolicyServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = AlertPolicyServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AlertPolicyServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = AlertPolicyServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = AlertPolicyServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AlertPolicyServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = AlertPolicyServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = AlertPolicyServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AlertPolicyServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.AlertPolicyServiceTransport, '_prep_wrapped_messages') as prep: + client = AlertPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.AlertPolicyServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = AlertPolicyServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_group_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_group_service.py new file mode 100644 index 00000000..4c21480d --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_group_service.py @@ -0,0 +1,2765 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api import monitored_resource_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.monitoring_v3.services.group_service import GroupServiceAsyncClient +from google.cloud.monitoring_v3.services.group_service import GroupServiceClient +from google.cloud.monitoring_v3.services.group_service import pagers +from google.cloud.monitoring_v3.services.group_service import transports +from google.cloud.monitoring_v3.services.group_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.monitoring_v3.types import common +from google.cloud.monitoring_v3.types import group +from google.cloud.monitoring_v3.types import group as gm_group +from google.cloud.monitoring_v3.types import group_service +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
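+# (For this package the default endpoint is monitoring.googleapis.com, which does not
+# contain "localhost", so the helper returns the endpoint unchanged; the substitution
+# only matters for clients generated against locally hosted APIs.)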
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GroupServiceClient._get_default_mtls_endpoint(None) is None + assert GroupServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert GroupServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert GroupServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert GroupServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert GroupServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + GroupServiceClient, + GroupServiceAsyncClient, +]) +def test_group_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.GroupServiceGrpcTransport, "grpc"), + (transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_group_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + GroupServiceClient, + GroupServiceAsyncClient, +]) +def test_group_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_group_service_client_get_transport_class(): + transport = GroupServiceClient.get_transport_class() + available_transports = [ + transports.GroupServiceGrpcTransport, + ] + assert transport in available_transports + + transport = GroupServiceClient.get_transport_class("grpc") + assert transport == transports.GroupServiceGrpcTransport + + 
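+# A minimal usage sketch, not part of the generated test matrix: it shows how the
+# pager returned by GroupServiceClient.list_groups() is consumed, using the same
+# transport-mocking pattern as the pager tests further below. It relies only on the
+# module-level imports above; the group names are made-up illustrative values.
+def test_list_groups_pager_usage_sketch():
+    client = GroupServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the transport call to return a single page with no next_page_token,
+    # so the pager stops after one request.
+    with mock.patch.object(
+            type(client.transport.list_groups),
+            '__call__') as call:
+        call.return_value = group_service.ListGroupsResponse(
+            group=[
+                group.Group(name='projects/squid/groups/clam'),
+                group.Group(name='projects/squid/groups/whelk'),
+            ],
+        )
+        results = list(client.list_groups(request={}))
+
+    # Iterating the pager yields the Group messages from every page.
+    assert len(results) == 2
+    assert all(isinstance(i, group.Group) for i in results)
+
+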
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc"), + (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(GroupServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GroupServiceClient)) +@mock.patch.object(GroupServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GroupServiceAsyncClient)) +def test_group_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GroupServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GroupServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc", "true"), + (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc", "false"), + (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(GroupServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GroupServiceClient)) +@mock.patch.object(GroupServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GroupServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_group_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc"), + (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_group_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc"), + (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_group_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_group_service_client_client_options_from_dict(): + with mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = GroupServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_groups(transport: str = 'grpc', request_type=group_service.ListGroupsRequest): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = group_service.ListGroupsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.ListGroupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGroupsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_groups_from_dict(): + test_list_groups(request_type=dict) + + +def test_list_groups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__') as call: + client.list_groups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.ListGroupsRequest() + + +@pytest.mark.asyncio +async def test_list_groups_async(transport: str = 'grpc_asyncio', request_type=group_service.ListGroupsRequest): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.ListGroupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGroupsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_groups_async_from_dict(): + await test_list_groups_async(request_type=dict) + + +def test_list_groups_field_headers(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.ListGroupsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__') as call: + call.return_value = group_service.ListGroupsResponse() + client.list_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_groups_field_headers_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.ListGroupsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupsResponse()) + await client.list_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_groups_flattened(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = group_service.ListGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_groups( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_list_groups_flattened_error(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_groups( + group_service.ListGroupsRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_list_groups_flattened_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = group_service.ListGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_groups( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_list_groups_flattened_error_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_groups( + group_service.ListGroupsRequest(), + name='name_value', + ) + + +def test_list_groups_pager(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + group_service.ListGroupsResponse( + group=[ + group.Group(), + group.Group(), + group.Group(), + ], + next_page_token='abc', + ), + group_service.ListGroupsResponse( + group=[], + next_page_token='def', + ), + group_service.ListGroupsResponse( + group=[ + group.Group(), + ], + next_page_token='ghi', + ), + group_service.ListGroupsResponse( + group=[ + group.Group(), + group.Group(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.list_groups(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, group.Group) + for i in results) + +def test_list_groups_pages(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + group_service.ListGroupsResponse( + group=[ + group.Group(), + group.Group(), + group.Group(), + ], + next_page_token='abc', + ), + group_service.ListGroupsResponse( + group=[], + next_page_token='def', + ), + group_service.ListGroupsResponse( + group=[ + group.Group(), + ], + next_page_token='ghi', + ), + group_service.ListGroupsResponse( + group=[ + group.Group(), + group.Group(), + ], + ), + RuntimeError, + ) + pages = list(client.list_groups(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_groups_async_pager(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + group_service.ListGroupsResponse( + group=[ + group.Group(), + group.Group(), + group.Group(), + ], + next_page_token='abc', + ), + group_service.ListGroupsResponse( + group=[], + next_page_token='def', + ), + group_service.ListGroupsResponse( + group=[ + group.Group(), + ], + next_page_token='ghi', + ), + group_service.ListGroupsResponse( + group=[ + group.Group(), + group.Group(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_groups(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, group.Group) + for i in responses) + +@pytest.mark.asyncio +async def test_list_groups_async_pages(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_groups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + group_service.ListGroupsResponse( + group=[ + group.Group(), + group.Group(), + group.Group(), + ], + next_page_token='abc', + ), + group_service.ListGroupsResponse( + group=[], + next_page_token='def', + ), + group_service.ListGroupsResponse( + group=[ + group.Group(), + ], + next_page_token='ghi', + ), + group_service.ListGroupsResponse( + group=[ + group.Group(), + group.Group(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_groups(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_group(transport: str = 'grpc', request_type=group_service.GetGroupRequest): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_group), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = group.Group( + name='name_value', + display_name='display_name_value', + parent_name='parent_name_value', + filter='filter_value', + is_cluster=True, + ) + response = client.get_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.GetGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, group.Group) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.parent_name == 'parent_name_value' + assert response.filter == 'filter_value' + assert response.is_cluster is True + + +def test_get_group_from_dict(): + test_get_group(request_type=dict) + + +def test_get_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_group), + '__call__') as call: + client.get_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.GetGroupRequest() + + +@pytest.mark.asyncio +async def test_get_group_async(transport: str = 'grpc_asyncio', request_type=group_service.GetGroupRequest): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(group.Group( + name='name_value', + display_name='display_name_value', + parent_name='parent_name_value', + filter='filter_value', + is_cluster=True, + )) + response = await client.get_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.GetGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, group.Group) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.parent_name == 'parent_name_value' + assert response.filter == 'filter_value' + assert response.is_cluster is True + + +@pytest.mark.asyncio +async def test_get_group_async_from_dict(): + await test_get_group_async(request_type=dict) + + +def test_get_group_field_headers(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.GetGroupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_group), + '__call__') as call: + call.return_value = group.Group() + client.get_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_group_field_headers_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.GetGroupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group.Group()) + await client.get_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_group_flattened(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = group.Group() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_group_flattened_error(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_group( + group_service.GetGroupRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_group_flattened_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = group.Group() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group.Group()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_group_flattened_error_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_group( + group_service.GetGroupRequest(), + name='name_value', + ) + + +def test_create_group(transport: str = 'grpc', request_type=group_service.CreateGroupRequest): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_group.Group( + name='name_value', + display_name='display_name_value', + parent_name='parent_name_value', + filter='filter_value', + is_cluster=True, + ) + response = client.create_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.CreateGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gm_group.Group) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.parent_name == 'parent_name_value' + assert response.filter == 'filter_value' + assert response.is_cluster is True + + +def test_create_group_from_dict(): + test_create_group(request_type=dict) + + +def test_create_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_group), + '__call__') as call: + client.create_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.CreateGroupRequest() + + +@pytest.mark.asyncio +async def test_create_group_async(transport: str = 'grpc_asyncio', request_type=group_service.CreateGroupRequest): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group( + name='name_value', + display_name='display_name_value', + parent_name='parent_name_value', + filter='filter_value', + is_cluster=True, + )) + response = await client.create_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.CreateGroupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gm_group.Group) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.parent_name == 'parent_name_value' + assert response.filter == 'filter_value' + assert response.is_cluster is True + + +@pytest.mark.asyncio +async def test_create_group_async_from_dict(): + await test_create_group_async(request_type=dict) + + +def test_create_group_field_headers(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.CreateGroupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_group), + '__call__') as call: + call.return_value = gm_group.Group() + client.create_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_group_field_headers_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.CreateGroupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group()) + await client.create_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_group_flattened(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_group.Group() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_group( + name='name_value', + group=gm_group.Group(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].group == gm_group.Group(name='name_value') + + +def test_create_group_flattened_error(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_group( + group_service.CreateGroupRequest(), + name='name_value', + group=gm_group.Group(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_group_flattened_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_group.Group() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_group( + name='name_value', + group=gm_group.Group(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].group == gm_group.Group(name='name_value') + + +@pytest.mark.asyncio +async def test_create_group_flattened_error_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_group( + group_service.CreateGroupRequest(), + name='name_value', + group=gm_group.Group(name='name_value'), + ) + + +def test_update_group(transport: str = 'grpc', request_type=group_service.UpdateGroupRequest): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_group.Group( + name='name_value', + display_name='display_name_value', + parent_name='parent_name_value', + filter='filter_value', + is_cluster=True, + ) + response = client.update_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.UpdateGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gm_group.Group) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.parent_name == 'parent_name_value' + assert response.filter == 'filter_value' + assert response.is_cluster is True + + +def test_update_group_from_dict(): + test_update_group(request_type=dict) + + +def test_update_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_group), + '__call__') as call: + client.update_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.UpdateGroupRequest() + + +@pytest.mark.asyncio +async def test_update_group_async(transport: str = 'grpc_asyncio', request_type=group_service.UpdateGroupRequest): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group( + name='name_value', + display_name='display_name_value', + parent_name='parent_name_value', + filter='filter_value', + is_cluster=True, + )) + response = await client.update_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.UpdateGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gm_group.Group) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.parent_name == 'parent_name_value' + assert response.filter == 'filter_value' + assert response.is_cluster is True + + +@pytest.mark.asyncio +async def test_update_group_async_from_dict(): + await test_update_group_async(request_type=dict) + + +def test_update_group_field_headers(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.UpdateGroupRequest() + + request.group.name = 'group.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_group), + '__call__') as call: + call.return_value = gm_group.Group() + client.update_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'group.name=group.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_group_field_headers_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.UpdateGroupRequest() + + request.group.name = 'group.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group()) + await client.update_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
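+    # The routing header is keyed on the nested request field, so the expected
+    # value below carries the 'group.name=' prefix rather than a bare 'name='.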
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'group.name=group.name/value', + ) in kw['metadata'] + + +def test_update_group_flattened(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_group.Group() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_group( + group=gm_group.Group(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].group == gm_group.Group(name='name_value') + + +def test_update_group_flattened_error(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_group( + group_service.UpdateGroupRequest(), + group=gm_group.Group(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_update_group_flattened_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_group.Group() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_group( + group=gm_group.Group(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].group == gm_group.Group(name='name_value') + + +@pytest.mark.asyncio +async def test_update_group_flattened_error_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_group( + group_service.UpdateGroupRequest(), + group=gm_group.Group(name='name_value'), + ) + + +def test_delete_group(transport: str = 'grpc', request_type=group_service.DeleteGroupRequest): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.DeleteGroupRequest() + + # Establish that the response is the type that we expect. 
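+    # DeleteGroup returns google.protobuf.Empty on the wire, which the
+    # generated client surfaces as a plain None.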
+ assert response is None + + +def test_delete_group_from_dict(): + test_delete_group(request_type=dict) + + +def test_delete_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_group), + '__call__') as call: + client.delete_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.DeleteGroupRequest() + + +@pytest.mark.asyncio +async def test_delete_group_async(transport: str = 'grpc_asyncio', request_type=group_service.DeleteGroupRequest): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.DeleteGroupRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_group_async_from_dict(): + await test_delete_group_async(request_type=dict) + + +def test_delete_group_field_headers(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.DeleteGroupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_group), + '__call__') as call: + call.return_value = None + client.delete_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_group_field_headers_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.DeleteGroupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_group_flattened(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_group_flattened_error(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_group( + group_service.DeleteGroupRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_group_flattened_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_group_flattened_error_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_group( + group_service.DeleteGroupRequest(), + name='name_value', + ) + + +def test_list_group_members(transport: str = 'grpc', request_type=group_service.ListGroupMembersRequest): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = group_service.ListGroupMembersResponse( + next_page_token='next_page_token_value', + total_size=1086, + ) + response = client.list_group_members(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.ListGroupMembersRequest() + + # Establish that the response is the type that we expect. 
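+    # The client wraps the raw ListGroupMembersResponse in a pager that proxies
+    # attribute access (next_page_token, total_size) to the underlying response.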
+ assert isinstance(response, pagers.ListGroupMembersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.total_size == 1086 + + +def test_list_group_members_from_dict(): + test_list_group_members(request_type=dict) + + +def test_list_group_members_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__') as call: + client.list_group_members() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.ListGroupMembersRequest() + + +@pytest.mark.asyncio +async def test_list_group_members_async(transport: str = 'grpc_asyncio', request_type=group_service.ListGroupMembersRequest): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupMembersResponse( + next_page_token='next_page_token_value', + total_size=1086, + )) + response = await client.list_group_members(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == group_service.ListGroupMembersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGroupMembersAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_group_members_async_from_dict(): + await test_list_group_members_async(request_type=dict) + + +def test_list_group_members_field_headers(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = group_service.ListGroupMembersRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__') as call: + call.return_value = group_service.ListGroupMembersResponse() + client.list_group_members(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_group_members_field_headers_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = group_service.ListGroupMembersRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupMembersResponse()) + await client.list_group_members(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_group_members_flattened(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = group_service.ListGroupMembersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_group_members( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_list_group_members_flattened_error(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_group_members( + group_service.ListGroupMembersRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_list_group_members_flattened_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = group_service.ListGroupMembersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupMembersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_group_members( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_list_group_members_flattened_error_async(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_group_members( + group_service.ListGroupMembersRequest(), + name='name_value', + ) + + +def test_list_group_members_pager(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
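+    # The mocked pages below are stitched together by the pager; the trailing
+    # RuntimeError fails the test if the pager requests more pages than supplied.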
+ with mock.patch.object( + type(client.transport.list_group_members), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + ], + next_page_token='abc', + ), + group_service.ListGroupMembersResponse( + members=[], + next_page_token='def', + ), + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + ], + next_page_token='ghi', + ), + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.list_group_members(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResource) + for i in results) + +def test_list_group_members_pages(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + ], + next_page_token='abc', + ), + group_service.ListGroupMembersResponse( + members=[], + next_page_token='def', + ), + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + ], + next_page_token='ghi', + ), + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_group_members(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_group_members_async_pager(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
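+        # Patching with mock.AsyncMock means each call returns an awaitable that
+        # resolves to the next mocked page, which is what the async pager expects.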
+ call.side_effect = ( + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + ], + next_page_token='abc', + ), + group_service.ListGroupMembersResponse( + members=[], + next_page_token='def', + ), + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + ], + next_page_token='ghi', + ), + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_group_members(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResource) + for i in responses) + +@pytest.mark.asyncio +async def test_list_group_members_async_pages(): + client = GroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_group_members), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + ], + next_page_token='abc', + ), + group_service.ListGroupMembersResponse( + members=[], + next_page_token='def', + ), + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + ], + next_page_token='ghi', + ), + group_service.ListGroupMembersResponse( + members=[ + monitored_resource_pb2.MonitoredResource(), + monitored_resource_pb2.MonitoredResource(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_group_members(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GroupServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GroupServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.GroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GroupServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.GroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.GroupServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.GroupServiceGrpcTransport, + transports.GroupServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.GroupServiceGrpcTransport, + ) + +def test_group_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GroupServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_group_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.GroupServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_groups', + 'get_group', + 'create_group', + 'update_group', + 'delete_group', + 'list_group_members', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_group_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GroupServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_group_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GroupServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ), + quota_project_id="octopus", + ) + + +def test_group_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GroupServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_group_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GroupServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_group_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
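+    # google-auth releases prior to 1.25.0 do not accept default_scopes, so the
+    # expectation below passes the monitoring scopes via the scopes argument.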
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GroupServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GroupServiceGrpcTransport, + transports.GroupServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_group_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GroupServiceGrpcTransport, + transports.GroupServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_group_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.GroupServiceGrpcTransport, grpc_helpers), + (transports.GroupServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_group_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "monitoring.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + scopes=["1", "2"], + default_host="monitoring.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.GroupServiceGrpcTransport, transports.GroupServiceGrpcAsyncIOTransport]) +def test_group_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_group_service_host_no_port(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), + ) + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_group_service_host_with_port(): + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), + ) + assert client.transport._host == 'monitoring.googleapis.com:8000' + +def test_group_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.GroupServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_group_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.GroupServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.GroupServiceGrpcTransport, transports.GroupServiceGrpcAsyncIOTransport]) +def test_group_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.GroupServiceGrpcTransport, transports.GroupServiceGrpcAsyncIOTransport]) +def test_group_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_group_path(): + project = "squid" + group = "clam" + expected = "projects/{project}/groups/{group}".format(project=project, group=group, ) + actual = GroupServiceClient.group_path(project, group) + assert expected == actual + + +def test_parse_group_path(): + expected = { + "project": "whelk", + "group": "octopus", + } + path = GroupServiceClient.group_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GroupServiceClient.parse_group_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = GroupServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = GroupServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GroupServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = GroupServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = GroupServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = GroupServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = GroupServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = GroupServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GroupServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = GroupServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = GroupServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GroupServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = GroupServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = GroupServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GroupServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.GroupServiceTransport, '_prep_wrapped_messages') as prep: + client = GroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.GroupServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = GroupServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_metric_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_metric_service.py new file mode 100644 index 00000000..ccb2f6d0 --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_metric_service.py @@ -0,0 +1,3511 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api import distribution_pb2 # type: ignore +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.monitoring_v3.services.metric_service import MetricServiceAsyncClient +from google.cloud.monitoring_v3.services.metric_service import MetricServiceClient +from google.cloud.monitoring_v3.services.metric_service import pagers +from google.cloud.monitoring_v3.services.metric_service import transports +from google.cloud.monitoring_v3.services.metric_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.monitoring_v3.types import common +from google.cloud.monitoring_v3.types import metric as gm_metric +from google.cloud.monitoring_v3.types import metric_service +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete 
these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MetricServiceClient._get_default_mtls_endpoint(None) is None + assert MetricServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert MetricServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert MetricServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert MetricServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert MetricServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + MetricServiceClient, + MetricServiceAsyncClient, +]) +def test_metric_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.MetricServiceGrpcTransport, "grpc"), + (transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metric_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + MetricServiceClient, + MetricServiceAsyncClient, +]) +def test_metric_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = 
client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_metric_service_client_get_transport_class(): + transport = MetricServiceClient.get_transport_class() + available_transports = [ + transports.MetricServiceGrpcTransport, + ] + assert transport in available_transports + + transport = MetricServiceClient.get_transport_class("grpc") + assert transport == transports.MetricServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc"), + (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(MetricServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricServiceClient)) +@mock.patch.object(MetricServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricServiceAsyncClient)) +def test_metric_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MetricServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MetricServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
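+    # With "always", the client must target DEFAULT_MTLS_ENDPOINT even though no
+    # client certificate source has been configured.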
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc", "true"), + (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc", "false"), + (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(MetricServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricServiceClient)) +@mock.patch.object(MetricServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_metric_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
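+    # GOOGLE_API_USE_MTLS_ENDPOINT is pinned to "auto" by the decorator above, so
+    # only the client certificate configuration varies across the cases below.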
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc"), + (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metric_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
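# --- Editorial note (not part of the generated patch) -------------------------
# The mtls_env_auto test above exercises endpoint auto-switching. As a plain
# summary of the behaviour its assertions encode (the helper below is purely
# illustrative, not API surface): GOOGLE_API_USE_MTLS_ENDPOINT "always"/"never"
# force the mTLS/default endpoint, and "auto" picks the mTLS endpoint only when
# GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client certificate source
# (explicit or ADC-provided) is available.
def _expected_endpoint(use_mtls_endpoint: str, use_client_cert: str,
                       has_cert_source: bool, default: str, mtls_default: str) -> str:
    if use_mtls_endpoint == "always":
        return mtls_default
    if use_mtls_endpoint == "never":
        return default
    # "auto": switch only when client certificates are both enabled and present.
    return mtls_default if (use_client_cert == "true" and has_cert_source) else default
# e.g. _expected_endpoint("auto", "true", True,
#                         "monitoring.googleapis.com", "monitoring.mtls.googleapis.com")
# returns the mTLS host, matching the DEFAULT_MTLS_ENDPOINT branch asserted above.
# ------------------------------------------------------------------------------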
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc"), + (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metric_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_metric_service_client_client_options_from_dict(): + with mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = MetricServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_monitored_resource_descriptors(transport: str = 'grpc', request_type=metric_service.ListMonitoredResourceDescriptorsRequest): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.ListMonitoredResourceDescriptorsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_monitored_resource_descriptors_from_dict(): + test_list_monitored_resource_descriptors(request_type=dict) + + +def test_list_monitored_resource_descriptors_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + client.list_monitored_resource_descriptors() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.ListMonitoredResourceDescriptorsRequest() + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=metric_service.ListMonitoredResourceDescriptorsRequest): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.ListMonitoredResourceDescriptorsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_from_dict(): + await test_list_monitored_resource_descriptors_async(request_type=dict) + + +def test_list_monitored_resource_descriptors_field_headers(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.ListMonitoredResourceDescriptorsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + call.return_value = metric_service.ListMonitoredResourceDescriptorsResponse() + client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
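# --- Editorial note (not part of the generated patch) -------------------------
# The field-header assertions that follow check the "x-goog-request-params"
# gRPC metadata entry. A minimal standalone sketch of how such an entry is
# built with google-api-core's routing-header helper (the key/value here is
# illustrative only):
from google.api_core.gapic_v1 import routing_header

def example_routing_metadata():
    # Returns the single entry ("x-goog-request-params", "name=name_value").
    return routing_header.to_grpc_metadata((("name", "name_value"),))
# ------------------------------------------------------------------------------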
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_field_headers_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.ListMonitoredResourceDescriptorsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMonitoredResourceDescriptorsResponse()) + await client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_monitored_resource_descriptors_flattened(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.ListMonitoredResourceDescriptorsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_monitored_resource_descriptors( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_list_monitored_resource_descriptors_flattened_error(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_monitored_resource_descriptors( + metric_service.ListMonitoredResourceDescriptorsRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_flattened_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.ListMonitoredResourceDescriptorsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMonitoredResourceDescriptorsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_monitored_resource_descriptors( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_flattened_error_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_monitored_resource_descriptors( + metric_service.ListMonitoredResourceDescriptorsRequest(), + name='name_value', + ) + + +def test_list_monitored_resource_descriptors_pager(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.list_monitored_resource_descriptors(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in results) + +def test_list_monitored_resource_descriptors_pages(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Set the response to a series of pages. 
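# --- Editorial note (not part of the generated patch) -------------------------
# The pager test above drives ListMonitoredResourceDescriptorsPager through four
# fake pages. Against a real project the same pager hides page tokens entirely;
# a hypothetical usage sketch (the project name is a placeholder and Application
# Default Credentials are assumed):
from google.cloud import monitoring_v3

def print_resource_descriptor_types(project: str = "projects/my-project") -> None:
    client = monitoring_v3.MetricServiceClient()
    for descriptor in client.list_monitored_resource_descriptors(name=project):
        # Each item is a google.api.monitored_resource_pb2.MonitoredResourceDescriptor.
        print(descriptor.type)
# ------------------------------------------------------------------------------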
+ call.side_effect = ( + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = list(client.list_monitored_resource_descriptors(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pager(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_monitored_resource_descriptors(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in responses) + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pages(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + metric_service.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_monitored_resource_descriptors(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_monitored_resource_descriptor(transport: str = 'grpc', request_type=metric_service.GetMonitoredResourceDescriptorRequest): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_monitored_resource_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = monitored_resource_pb2.MonitoredResourceDescriptor( + name='name_value', + type_='type__value', + display_name='display_name_value', + description='description_value', + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + ) + response = client.get_monitored_resource_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.GetMonitoredResourceDescriptorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, monitored_resource_pb2.MonitoredResourceDescriptor) + assert response.name == 'name_value' + assert response.type_ == 'type__value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + + +def test_get_monitored_resource_descriptor_from_dict(): + test_get_monitored_resource_descriptor(request_type=dict) + + +def test_get_monitored_resource_descriptor_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_monitored_resource_descriptor), + '__call__') as call: + client.get_monitored_resource_descriptor() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.GetMonitoredResourceDescriptorRequest() + + +@pytest.mark.asyncio +async def test_get_monitored_resource_descriptor_async(transport: str = 'grpc_asyncio', request_type=metric_service.GetMonitoredResourceDescriptorRequest): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_monitored_resource_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(monitored_resource_pb2.MonitoredResourceDescriptor( + name='name_value', + type_='type__value', + display_name='display_name_value', + description='description_value', + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + )) + response = await client.get_monitored_resource_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.GetMonitoredResourceDescriptorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, monitored_resource_pb2.MonitoredResourceDescriptor) + assert response.name == 'name_value' + assert response.type_ == 'type__value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + + +@pytest.mark.asyncio +async def test_get_monitored_resource_descriptor_async_from_dict(): + await test_get_monitored_resource_descriptor_async(request_type=dict) + + +def test_get_monitored_resource_descriptor_field_headers(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.GetMonitoredResourceDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_monitored_resource_descriptor), + '__call__') as call: + call.return_value = monitored_resource_pb2.MonitoredResourceDescriptor() + client.get_monitored_resource_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_monitored_resource_descriptor_field_headers_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = metric_service.GetMonitoredResourceDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_monitored_resource_descriptor), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(monitored_resource_pb2.MonitoredResourceDescriptor()) + await client.get_monitored_resource_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_monitored_resource_descriptor_flattened(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_monitored_resource_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = monitored_resource_pb2.MonitoredResourceDescriptor() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_monitored_resource_descriptor( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_monitored_resource_descriptor_flattened_error(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_monitored_resource_descriptor( + metric_service.GetMonitoredResourceDescriptorRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_monitored_resource_descriptor_flattened_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_monitored_resource_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = monitored_resource_pb2.MonitoredResourceDescriptor() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(monitored_resource_pb2.MonitoredResourceDescriptor()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_monitored_resource_descriptor( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_monitored_resource_descriptor_flattened_error_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
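# --- Editorial note (not part of the generated patch) -------------------------
# The *_flattened_error tests encode the general GAPIC rule that a call takes
# either a request object or flattened keyword arguments, never both. A
# hypothetical sketch of the two accepted call shapes (client and resource name
# are placeholders):
from google.cloud import monitoring_v3

def get_descriptor_both_ways(client: monitoring_v3.MetricServiceClient, name: str):
    request = monitoring_v3.GetMonitoredResourceDescriptorRequest(name=name)
    by_request = client.get_monitored_resource_descriptor(request=request)
    by_flattened = client.get_monitored_resource_descriptor(name=name)
    return by_request, by_flattened
# ------------------------------------------------------------------------------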
+ with pytest.raises(ValueError): + await client.get_monitored_resource_descriptor( + metric_service.GetMonitoredResourceDescriptorRequest(), + name='name_value', + ) + + +def test_list_metric_descriptors(transport: str = 'grpc', request_type=metric_service.ListMetricDescriptorsRequest): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.ListMetricDescriptorsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_metric_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.ListMetricDescriptorsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMetricDescriptorsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_metric_descriptors_from_dict(): + test_list_metric_descriptors(request_type=dict) + + +def test_list_metric_descriptors_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__') as call: + client.list_metric_descriptors() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.ListMetricDescriptorsRequest() + + +@pytest.mark.asyncio +async def test_list_metric_descriptors_async(transport: str = 'grpc_asyncio', request_type=metric_service.ListMetricDescriptorsRequest): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMetricDescriptorsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_metric_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.ListMetricDescriptorsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListMetricDescriptorsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_metric_descriptors_async_from_dict(): + await test_list_metric_descriptors_async(request_type=dict) + + +def test_list_metric_descriptors_field_headers(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.ListMetricDescriptorsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__') as call: + call.return_value = metric_service.ListMetricDescriptorsResponse() + client.list_metric_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_metric_descriptors_field_headers_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.ListMetricDescriptorsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMetricDescriptorsResponse()) + await client.list_metric_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_metric_descriptors_flattened(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.ListMetricDescriptorsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_metric_descriptors( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_list_metric_descriptors_flattened_error(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_metric_descriptors( + metric_service.ListMetricDescriptorsRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_list_metric_descriptors_flattened_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.ListMetricDescriptorsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMetricDescriptorsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_metric_descriptors( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_list_metric_descriptors_flattened_error_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_metric_descriptors( + metric_service.ListMetricDescriptorsRequest(), + name='name_value', + ) + + +def test_list_metric_descriptors_pager(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + ], + next_page_token='abc', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[], + next_page_token='def', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + ], + next_page_token='ghi', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.list_metric_descriptors(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, metric_pb2.MetricDescriptor) + for i in results) + +def test_list_metric_descriptors_pages(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + ], + next_page_token='abc', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[], + next_page_token='def', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + ], + next_page_token='ghi', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + ], + ), + RuntimeError, + ) + pages = list(client.list_metric_descriptors(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_metric_descriptors_async_pager(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + ], + next_page_token='abc', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[], + next_page_token='def', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + ], + next_page_token='ghi', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_metric_descriptors(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metric_pb2.MetricDescriptor) + for i in responses) + +@pytest.mark.asyncio +async def test_list_metric_descriptors_async_pages(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metric_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
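# --- Editorial note (not part of the generated patch) -------------------------
# The async pager above is consumed with "async for". A hypothetical sketch with
# the real async client (project name is a placeholder and Application Default
# Credentials are assumed):
from google.cloud import monitoring_v3

async def print_metric_descriptor_types(project: str = "projects/my-project") -> None:
    client = monitoring_v3.MetricServiceAsyncClient()
    async for descriptor in await client.list_metric_descriptors(name=project):
        print(descriptor.type)  # each item is a google.api.metric_pb2.MetricDescriptor
# (Run with: asyncio.run(print_metric_descriptor_types()))
# ------------------------------------------------------------------------------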
+ call.side_effect = ( + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + ], + next_page_token='abc', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[], + next_page_token='def', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + ], + next_page_token='ghi', + ), + metric_service.ListMetricDescriptorsResponse( + metric_descriptors=[ + metric_pb2.MetricDescriptor(), + metric_pb2.MetricDescriptor(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_metric_descriptors(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_metric_descriptor(transport: str = 'grpc', request_type=metric_service.GetMetricDescriptorRequest): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_pb2.MetricDescriptor( + name='name_value', + type_='type__value', + metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE, + value_type=metric_pb2.MetricDescriptor.ValueType.BOOL, + unit='unit_value', + description='description_value', + display_name='display_name_value', + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + monitored_resource_types=['monitored_resource_types_value'], + ) + response = client.get_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.GetMetricDescriptorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metric_pb2.MetricDescriptor) + assert response.name == 'name_value' + assert response.type_ == 'type__value' + assert response.metric_kind == metric_pb2.MetricDescriptor.MetricKind.GAUGE + assert response.value_type == metric_pb2.MetricDescriptor.ValueType.BOOL + assert response.unit == 'unit_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.monitored_resource_types == ['monitored_resource_types_value'] + + +def test_get_metric_descriptor_from_dict(): + test_get_metric_descriptor(request_type=dict) + + +def test_get_metric_descriptor_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_metric_descriptor), + '__call__') as call: + client.get_metric_descriptor() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.GetMetricDescriptorRequest() + + +@pytest.mark.asyncio +async def test_get_metric_descriptor_async(transport: str = 'grpc_asyncio', request_type=metric_service.GetMetricDescriptorRequest): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor( + name='name_value', + type_='type__value', + metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE, + value_type=metric_pb2.MetricDescriptor.ValueType.BOOL, + unit='unit_value', + description='description_value', + display_name='display_name_value', + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + monitored_resource_types=['monitored_resource_types_value'], + )) + response = await client.get_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.GetMetricDescriptorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metric_pb2.MetricDescriptor) + assert response.name == 'name_value' + assert response.type_ == 'type__value' + assert response.metric_kind == metric_pb2.MetricDescriptor.MetricKind.GAUGE + assert response.value_type == metric_pb2.MetricDescriptor.ValueType.BOOL + assert response.unit == 'unit_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.monitored_resource_types == ['monitored_resource_types_value'] + + +@pytest.mark.asyncio +async def test_get_metric_descriptor_async_from_dict(): + await test_get_metric_descriptor_async(request_type=dict) + + +def test_get_metric_descriptor_field_headers(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.GetMetricDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metric_descriptor), + '__call__') as call: + call.return_value = metric_pb2.MetricDescriptor() + client.get_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_metric_descriptor_field_headers_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.GetMetricDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metric_descriptor), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor()) + await client.get_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_metric_descriptor_flattened(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_pb2.MetricDescriptor() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_metric_descriptor( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_metric_descriptor_flattened_error(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_metric_descriptor( + metric_service.GetMetricDescriptorRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_metric_descriptor_flattened_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_pb2.MetricDescriptor() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_metric_descriptor( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_metric_descriptor_flattened_error_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_metric_descriptor( + metric_service.GetMetricDescriptorRequest(), + name='name_value', + ) + + +def test_create_metric_descriptor(transport: str = 'grpc', request_type=metric_service.CreateMetricDescriptorRequest): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_pb2.MetricDescriptor( + name='name_value', + type_='type__value', + metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE, + value_type=metric_pb2.MetricDescriptor.ValueType.BOOL, + unit='unit_value', + description='description_value', + display_name='display_name_value', + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + monitored_resource_types=['monitored_resource_types_value'], + ) + response = client.create_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.CreateMetricDescriptorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metric_pb2.MetricDescriptor) + assert response.name == 'name_value' + assert response.type_ == 'type__value' + assert response.metric_kind == metric_pb2.MetricDescriptor.MetricKind.GAUGE + assert response.value_type == metric_pb2.MetricDescriptor.ValueType.BOOL + assert response.unit == 'unit_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.monitored_resource_types == ['monitored_resource_types_value'] + + +def test_create_metric_descriptor_from_dict(): + test_create_metric_descriptor(request_type=dict) + + +def test_create_metric_descriptor_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metric_descriptor), + '__call__') as call: + client.create_metric_descriptor() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.CreateMetricDescriptorRequest() + + +@pytest.mark.asyncio +async def test_create_metric_descriptor_async(transport: str = 'grpc_asyncio', request_type=metric_service.CreateMetricDescriptorRequest): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor( + name='name_value', + type_='type__value', + metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE, + value_type=metric_pb2.MetricDescriptor.ValueType.BOOL, + unit='unit_value', + description='description_value', + display_name='display_name_value', + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + monitored_resource_types=['monitored_resource_types_value'], + )) + response = await client.create_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.CreateMetricDescriptorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metric_pb2.MetricDescriptor) + assert response.name == 'name_value' + assert response.type_ == 'type__value' + assert response.metric_kind == metric_pb2.MetricDescriptor.MetricKind.GAUGE + assert response.value_type == metric_pb2.MetricDescriptor.ValueType.BOOL + assert response.unit == 'unit_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.monitored_resource_types == ['monitored_resource_types_value'] + + +@pytest.mark.asyncio +async def test_create_metric_descriptor_async_from_dict(): + await test_create_metric_descriptor_async(request_type=dict) + + +def test_create_metric_descriptor_field_headers(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.CreateMetricDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metric_descriptor), + '__call__') as call: + call.return_value = metric_pb2.MetricDescriptor() + client.create_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_metric_descriptor_field_headers_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.CreateMetricDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metric_descriptor), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor()) + await client.create_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_metric_descriptor_flattened(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_pb2.MetricDescriptor() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_metric_descriptor( + name='name_value', + metric_descriptor=metric_pb2.MetricDescriptor(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].metric_descriptor == metric_pb2.MetricDescriptor(name='name_value') + + +def test_create_metric_descriptor_flattened_error(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_metric_descriptor( + metric_service.CreateMetricDescriptorRequest(), + name='name_value', + metric_descriptor=metric_pb2.MetricDescriptor(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_metric_descriptor_flattened_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_pb2.MetricDescriptor() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_metric_descriptor( + name='name_value', + metric_descriptor=metric_pb2.MetricDescriptor(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].metric_descriptor == metric_pb2.MetricDescriptor(name='name_value') + + +@pytest.mark.asyncio +async def test_create_metric_descriptor_flattened_error_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_metric_descriptor( + metric_service.CreateMetricDescriptorRequest(), + name='name_value', + metric_descriptor=metric_pb2.MetricDescriptor(name='name_value'), + ) + + +def test_delete_metric_descriptor(transport: str = 'grpc', request_type=metric_service.DeleteMetricDescriptorRequest): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.DeleteMetricDescriptorRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_metric_descriptor_from_dict(): + test_delete_metric_descriptor(request_type=dict) + + +def test_delete_metric_descriptor_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_metric_descriptor), + '__call__') as call: + client.delete_metric_descriptor() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.DeleteMetricDescriptorRequest() + + +@pytest.mark.asyncio +async def test_delete_metric_descriptor_async(transport: str = 'grpc_asyncio', request_type=metric_service.DeleteMetricDescriptorRequest): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.DeleteMetricDescriptorRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_metric_descriptor_async_from_dict(): + await test_delete_metric_descriptor_async(request_type=dict) + + +def test_delete_metric_descriptor_field_headers(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.DeleteMetricDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_metric_descriptor), + '__call__') as call: + call.return_value = None + client.delete_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
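+ # The client is expected to copy the request's resource name into the
+ # x-goog-request-params metadata entry so the backend can route the call;
+ # the assertion below checks for that exact key/value pair.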
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_metric_descriptor_field_headers_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.DeleteMetricDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_metric_descriptor), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_metric_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_metric_descriptor_flattened(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_metric_descriptor( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_metric_descriptor_flattened_error(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_metric_descriptor( + metric_service.DeleteMetricDescriptorRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_metric_descriptor_flattened_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_metric_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_metric_descriptor( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_metric_descriptor_flattened_error_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
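+ # Passing both forms is ambiguous, so the client should raise ValueError
+ # client-side, before any RPC is attempted.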
+ with pytest.raises(ValueError): + await client.delete_metric_descriptor( + metric_service.DeleteMetricDescriptorRequest(), + name='name_value', + ) + + +def test_list_time_series(transport: str = 'grpc', request_type=metric_service.ListTimeSeriesRequest): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.ListTimeSeriesResponse( + next_page_token='next_page_token_value', + unit='unit_value', + ) + response = client.list_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.ListTimeSeriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTimeSeriesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unit == 'unit_value' + + +def test_list_time_series_from_dict(): + test_list_time_series(request_type=dict) + + +def test_list_time_series_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__') as call: + client.list_time_series() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.ListTimeSeriesRequest() + + +@pytest.mark.asyncio +async def test_list_time_series_async(transport: str = 'grpc_asyncio', request_type=metric_service.ListTimeSeriesRequest): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListTimeSeriesResponse( + next_page_token='next_page_token_value', + unit='unit_value', + )) + response = await client.list_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.ListTimeSeriesRequest() + + # Establish that the response is the type that we expect. 
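+ # list_time_series returns a pager wrapping the raw ListTimeSeriesResponse;
+ # attribute access such as next_page_token and unit is delegated to that
+ # underlying response, which is what the assertions below rely on.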
+ assert isinstance(response, pagers.ListTimeSeriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unit == 'unit_value' + + +@pytest.mark.asyncio +async def test_list_time_series_async_from_dict(): + await test_list_time_series_async(request_type=dict) + + +def test_list_time_series_field_headers(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.ListTimeSeriesRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__') as call: + call.return_value = metric_service.ListTimeSeriesResponse() + client.list_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_time_series_field_headers_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.ListTimeSeriesRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListTimeSeriesResponse()) + await client.list_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_time_series_flattened(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.ListTimeSeriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_time_series( + name='name_value', + filter='filter_value', + interval=common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)), + view=metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
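+ # The flattened keyword arguments are expected to be coalesced into a single
+ # ListTimeSeriesRequest before reaching the transport, so the assertions below
+ # inspect the fields of args[0] rather than separate parameters.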
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].filter == 'filter_value' + assert args[0].interval == common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)) + assert args[0].view == metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS + + +def test_list_time_series_flattened_error(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_time_series( + metric_service.ListTimeSeriesRequest(), + name='name_value', + filter='filter_value', + interval=common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)), + view=metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS, + ) + + +@pytest.mark.asyncio +async def test_list_time_series_flattened_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.ListTimeSeriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListTimeSeriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_time_series( + name='name_value', + filter='filter_value', + interval=common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)), + view=metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].filter == 'filter_value' + assert args[0].interval == common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)) + assert args[0].view == metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS + + +@pytest.mark.asyncio +async def test_list_time_series_flattened_error_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_time_series( + metric_service.ListTimeSeriesRequest(), + name='name_value', + filter='filter_value', + interval=common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)), + view=metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS, + ) + + +def test_list_time_series_pager(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__') as call: + # Set the response to a series of pages. 
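+ # mock's side_effect serves one item per call, so each page fetch returns the
+ # next ListTimeSeriesResponse; the trailing RuntimeError acts as a sentinel
+ # that fails the test if the pager requests more pages than were provided.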
+ call.side_effect = ( + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + ], + next_page_token='abc', + ), + metric_service.ListTimeSeriesResponse( + time_series=[], + next_page_token='def', + ), + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + ], + next_page_token='ghi', + ), + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.list_time_series(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, gm_metric.TimeSeries) + for i in results) + +def test_list_time_series_pages(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + ], + next_page_token='abc', + ), + metric_service.ListTimeSeriesResponse( + time_series=[], + next_page_token='def', + ), + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + ], + next_page_token='ghi', + ), + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + ], + ), + RuntimeError, + ) + pages = list(client.list_time_series(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_time_series_async_pager(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + ], + next_page_token='abc', + ), + metric_service.ListTimeSeriesResponse( + time_series=[], + next_page_token='def', + ), + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + ], + next_page_token='ghi', + ), + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_time_series(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, gm_metric.TimeSeries) + for i in responses) + +@pytest.mark.asyncio +async def test_list_time_series_async_pages(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_time_series), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + ], + next_page_token='abc', + ), + metric_service.ListTimeSeriesResponse( + time_series=[], + next_page_token='def', + ), + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + ], + next_page_token='ghi', + ), + metric_service.ListTimeSeriesResponse( + time_series=[ + gm_metric.TimeSeries(), + gm_metric.TimeSeries(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_time_series(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_create_time_series(transport: str = 'grpc', request_type=metric_service.CreateTimeSeriesRequest): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.create_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.CreateTimeSeriesRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_create_time_series_from_dict(): + test_create_time_series(request_type=dict) + + +def test_create_time_series_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_time_series), + '__call__') as call: + client.create_time_series() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.CreateTimeSeriesRequest() + + +@pytest.mark.asyncio +async def test_create_time_series_async(transport: str = 'grpc_asyncio', request_type=metric_service.CreateTimeSeriesRequest): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.create_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.CreateTimeSeriesRequest() + + # Establish that the response is the type that we expect. 
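+ # CreateTimeSeries has no meaningful response payload; the mocked call yields
+ # None (wrapped so it can be awaited) and the client should pass it through.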
+ assert response is None + + +@pytest.mark.asyncio +async def test_create_time_series_async_from_dict(): + await test_create_time_series_async(request_type=dict) + + +def test_create_time_series_field_headers(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.CreateTimeSeriesRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_time_series), + '__call__') as call: + call.return_value = None + client.create_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_time_series_field_headers_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.CreateTimeSeriesRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_time_series), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.create_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_time_series_flattened(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_time_series( + name='name_value', + time_series=[gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].time_series == [gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))] + + +def test_create_time_series_flattened_error(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_time_series( + metric_service.CreateTimeSeriesRequest(), + name='name_value', + time_series=[gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))], + ) + + +@pytest.mark.asyncio +async def test_create_time_series_flattened_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_time_series( + name='name_value', + time_series=[gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].time_series == [gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))] + + +@pytest.mark.asyncio +async def test_create_time_series_flattened_error_async(): + client = MetricServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_time_series( + metric_service.CreateTimeSeriesRequest(), + name='name_value', + time_series=[gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))], + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetricServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetricServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MetricServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MetricServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MetricServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetricServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.MetricServiceGrpcTransport, + transports.MetricServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetricServiceGrpcTransport, + ) + +def test_metric_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetricServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_metric_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.MetricServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
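+ # The tuple below enumerates every MetricService RPC exposed on the base
+ # transport; the loop then invokes each one via getattr to confirm the
+ # abstract class never silently no-ops a call.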
+ methods = ( + 'list_monitored_resource_descriptors', + 'get_monitored_resource_descriptor', + 'list_metric_descriptors', + 'get_metric_descriptor', + 'create_metric_descriptor', + 'delete_metric_descriptor', + 'list_time_series', + 'create_time_series', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_metric_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + 'https://www.googleapis.com/auth/monitoring.write', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_metric_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + 'https://www.googleapis.com/auth/monitoring.write', + ), + quota_project_id="octopus", + ) + + +def test_metric_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_metric_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
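+ # ADC (Application Default Credentials) are resolved through
+ # google.auth.default(), which is patched here so no real credential lookup
+ # happens during the test.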
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + 'https://www.googleapis.com/auth/monitoring.write', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_metric_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read', 'https://www.googleapis.com/auth/monitoring.write',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricServiceGrpcTransport, + transports.MetricServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_metric_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read', 'https://www.googleapis.com/auth/monitoring.write',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricServiceGrpcTransport, + transports.MetricServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_metric_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + 'https://www.googleapis.com/auth/monitoring.write', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricServiceGrpcTransport, grpc_helpers), + (transports.MetricServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_metric_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
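+ # grpc_helpers.create_channel is patched as well, so the test can verify the
+ # channel parameters (host, scopes, message-size options) without opening a
+ # network connection.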
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "monitoring.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + 'https://www.googleapis.com/auth/monitoring.write', +), + scopes=["1", "2"], + default_host="monitoring.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.MetricServiceGrpcTransport, transports.MetricServiceGrpcAsyncIOTransport]) +def test_metric_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_metric_service_host_no_port(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), + ) + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_metric_service_host_with_port(): + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), + ) + assert client.transport._host == 'monitoring.googleapis.com:8000' + +def test_metric_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MetricServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_metric_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.MetricServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.MetricServiceGrpcTransport, transports.MetricServiceGrpcAsyncIOTransport]) +def test_metric_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.MetricServiceGrpcTransport, transports.MetricServiceGrpcAsyncIOTransport]) +def test_metric_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_metric_descriptor_path(): + project = "squid" + expected = "projects/{project}/metricDescriptors/{metric_descriptor=**}".format(project=project, ) + actual = MetricServiceClient.metric_descriptor_path(project) + assert expected == actual + + +def test_parse_metric_descriptor_path(): + expected = { + "project": "clam", + } + path = MetricServiceClient.metric_descriptor_path(**expected) + + # Check that the path construction is reversible. + actual = MetricServiceClient.parse_metric_descriptor_path(path) + assert expected == actual + +def test_monitored_resource_descriptor_path(): + project = "whelk" + monitored_resource_descriptor = "octopus" + expected = "projects/{project}/monitoredResourceDescriptors/{monitored_resource_descriptor}".format(project=project, monitored_resource_descriptor=monitored_resource_descriptor, ) + actual = MetricServiceClient.monitored_resource_descriptor_path(project, monitored_resource_descriptor) + assert expected == actual + + +def test_parse_monitored_resource_descriptor_path(): + expected = { + "project": "oyster", + "monitored_resource_descriptor": "nudibranch", + } + path = MetricServiceClient.monitored_resource_descriptor_path(**expected) + + # Check that the path construction is reversible. + actual = MetricServiceClient.parse_monitored_resource_descriptor_path(path) + assert expected == actual + +def test_time_series_path(): + project = "cuttlefish" + time_series = "mussel" + expected = "projects/{project}/timeSeries/{time_series}".format(project=project, time_series=time_series, ) + actual = MetricServiceClient.time_series_path(project, time_series) + assert expected == actual + + +def test_parse_time_series_path(): + expected = { + "project": "winkle", + "time_series": "nautilus", + } + path = MetricServiceClient.time_series_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetricServiceClient.parse_time_series_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MetricServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = MetricServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetricServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = MetricServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = MetricServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MetricServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MetricServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = MetricServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MetricServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = MetricServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = MetricServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MetricServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MetricServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = MetricServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetricServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MetricServiceTransport, '_prep_wrapped_messages') as prep: + client = MetricServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MetricServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = MetricServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py new file mode 100644 index 00000000..f9cedb52 --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py @@ -0,0 +1,3757 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.monitoring_v3.services.notification_channel_service import NotificationChannelServiceAsyncClient +from google.cloud.monitoring_v3.services.notification_channel_service import NotificationChannelServiceClient +from google.cloud.monitoring_v3.services.notification_channel_service import pagers +from google.cloud.monitoring_v3.services.notification_channel_service import transports +from google.cloud.monitoring_v3.services.notification_channel_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.monitoring_v3.types import mutation_record +from google.cloud.monitoring_v3.types import notification +from google.cloud.monitoring_v3.types import notification_service +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the 
default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NotificationChannelServiceClient._get_default_mtls_endpoint(None) is None + assert NotificationChannelServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert NotificationChannelServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert NotificationChannelServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert NotificationChannelServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert NotificationChannelServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + NotificationChannelServiceClient, + NotificationChannelServiceAsyncClient, +]) +def test_notification_channel_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.NotificationChannelServiceGrpcTransport, "grpc"), + (transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_notification_channel_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + NotificationChannelServiceClient, + NotificationChannelServiceAsyncClient, +]) +def test_notification_channel_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with 
mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_notification_channel_service_client_get_transport_class(): + transport = NotificationChannelServiceClient.get_transport_class() + available_transports = [ + transports.NotificationChannelServiceGrpcTransport, + ] + assert transport in available_transports + + transport = NotificationChannelServiceClient.get_transport_class("grpc") + assert transport == transports.NotificationChannelServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc"), + (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(NotificationChannelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NotificationChannelServiceClient)) +@mock.patch.object(NotificationChannelServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NotificationChannelServiceAsyncClient)) +def test_notification_channel_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NotificationChannelServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NotificationChannelServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc", "true"), + (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc", "false"), + (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(NotificationChannelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NotificationChannelServiceClient)) +@mock.patch.object(NotificationChannelServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NotificationChannelServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_notification_channel_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc"), + (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_notification_channel_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc"), + (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_notification_channel_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_notification_channel_service_client_client_options_from_dict(): + with mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = NotificationChannelServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_notification_channel_descriptors(transport: str = 'grpc', request_type=notification_service.ListNotificationChannelDescriptorsRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification_service.ListNotificationChannelDescriptorsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_notification_channel_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.ListNotificationChannelDescriptorsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListNotificationChannelDescriptorsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_notification_channel_descriptors_from_dict(): + test_list_notification_channel_descriptors(request_type=dict) + + +def test_list_notification_channel_descriptors_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__') as call: + client.list_notification_channel_descriptors() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.ListNotificationChannelDescriptorsRequest() + + +@pytest.mark.asyncio +async def test_list_notification_channel_descriptors_async(transport: str = 'grpc_asyncio', request_type=notification_service.ListNotificationChannelDescriptorsRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelDescriptorsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_notification_channel_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.ListNotificationChannelDescriptorsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNotificationChannelDescriptorsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_notification_channel_descriptors_async_from_dict(): + await test_list_notification_channel_descriptors_async(request_type=dict) + + +def test_list_notification_channel_descriptors_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.ListNotificationChannelDescriptorsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__') as call: + call.return_value = notification_service.ListNotificationChannelDescriptorsResponse() + client.list_notification_channel_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_notification_channel_descriptors_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.ListNotificationChannelDescriptorsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelDescriptorsResponse()) + await client.list_notification_channel_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_notification_channel_descriptors_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification_service.ListNotificationChannelDescriptorsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_notification_channel_descriptors( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_list_notification_channel_descriptors_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_notification_channel_descriptors( + notification_service.ListNotificationChannelDescriptorsRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_list_notification_channel_descriptors_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification_service.ListNotificationChannelDescriptorsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelDescriptorsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_notification_channel_descriptors( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_list_notification_channel_descriptors_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_notification_channel_descriptors( + notification_service.ListNotificationChannelDescriptorsRequest(), + name='name_value', + ) + + +def test_list_notification_channel_descriptors_pager(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + ], + next_page_token='abc', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[], + next_page_token='def', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + ], + next_page_token='ghi', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.list_notification_channel_descriptors(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, notification.NotificationChannelDescriptor) + for i in results) + +def test_list_notification_channel_descriptors_pages(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + ], + next_page_token='abc', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[], + next_page_token='def', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + ], + next_page_token='ghi', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + ], + ), + RuntimeError, + ) + pages = list(client.list_notification_channel_descriptors(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_notification_channel_descriptors_async_pager(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + ], + next_page_token='abc', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[], + next_page_token='def', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + ], + next_page_token='ghi', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_notification_channel_descriptors(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, notification.NotificationChannelDescriptor) + for i in responses) + +@pytest.mark.asyncio +async def test_list_notification_channel_descriptors_async_pages(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channel_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + ], + next_page_token='abc', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[], + next_page_token='def', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + ], + next_page_token='ghi', + ), + notification_service.ListNotificationChannelDescriptorsResponse( + channel_descriptors=[ + notification.NotificationChannelDescriptor(), + notification.NotificationChannelDescriptor(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_notification_channel_descriptors(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_notification_channel_descriptor(transport: str = 'grpc', request_type=notification_service.GetNotificationChannelDescriptorRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannelDescriptor( + name='name_value', + type_='type__value', + display_name='display_name_value', + description='description_value', + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + ) + response = client.get_notification_channel_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.GetNotificationChannelDescriptorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification.NotificationChannelDescriptor) + assert response.name == 'name_value' + assert response.type_ == 'type__value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + + +def test_get_notification_channel_descriptor_from_dict(): + test_get_notification_channel_descriptor(request_type=dict) + + +def test_get_notification_channel_descriptor_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_notification_channel_descriptor), + '__call__') as call: + client.get_notification_channel_descriptor() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.GetNotificationChannelDescriptorRequest() + + +@pytest.mark.asyncio +async def test_get_notification_channel_descriptor_async(transport: str = 'grpc_asyncio', request_type=notification_service.GetNotificationChannelDescriptorRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannelDescriptor( + name='name_value', + type_='type__value', + display_name='display_name_value', + description='description_value', + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + )) + response = await client.get_notification_channel_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.GetNotificationChannelDescriptorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification.NotificationChannelDescriptor) + assert response.name == 'name_value' + assert response.type_ == 'type__value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + + +@pytest.mark.asyncio +async def test_get_notification_channel_descriptor_async_from_dict(): + await test_get_notification_channel_descriptor_async(request_type=dict) + + +def test_get_notification_channel_descriptor_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.GetNotificationChannelDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_descriptor), + '__call__') as call: + call.return_value = notification.NotificationChannelDescriptor() + client.get_notification_channel_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_notification_channel_descriptor_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = notification_service.GetNotificationChannelDescriptorRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_descriptor), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannelDescriptor()) + await client.get_notification_channel_descriptor(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_notification_channel_descriptor_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannelDescriptor() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_notification_channel_descriptor( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_notification_channel_descriptor_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_notification_channel_descriptor( + notification_service.GetNotificationChannelDescriptorRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_notification_channel_descriptor_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_descriptor), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannelDescriptor() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannelDescriptor()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_notification_channel_descriptor( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_notification_channel_descriptor_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_notification_channel_descriptor( + notification_service.GetNotificationChannelDescriptorRequest(), + name='name_value', + ) + + +def test_list_notification_channels(transport: str = 'grpc', request_type=notification_service.ListNotificationChannelsRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification_service.ListNotificationChannelsResponse( + next_page_token='next_page_token_value', + total_size=1086, + ) + response = client.list_notification_channels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.ListNotificationChannelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNotificationChannelsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.total_size == 1086 + + +def test_list_notification_channels_from_dict(): + test_list_notification_channels(request_type=dict) + + +def test_list_notification_channels_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__') as call: + client.list_notification_channels() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.ListNotificationChannelsRequest() + + +@pytest.mark.asyncio +async def test_list_notification_channels_async(transport: str = 'grpc_asyncio', request_type=notification_service.ListNotificationChannelsRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelsResponse( + next_page_token='next_page_token_value', + total_size=1086, + )) + response = await client.list_notification_channels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.ListNotificationChannelsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListNotificationChannelsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_notification_channels_async_from_dict(): + await test_list_notification_channels_async(request_type=dict) + + +def test_list_notification_channels_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.ListNotificationChannelsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__') as call: + call.return_value = notification_service.ListNotificationChannelsResponse() + client.list_notification_channels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_notification_channels_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.ListNotificationChannelsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelsResponse()) + await client.list_notification_channels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_notification_channels_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification_service.ListNotificationChannelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_notification_channels( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_list_notification_channels_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_notification_channels( + notification_service.ListNotificationChannelsRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_list_notification_channels_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification_service.ListNotificationChannelsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_notification_channels( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_list_notification_channels_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_notification_channels( + notification_service.ListNotificationChannelsRequest(), + name='name_value', + ) + + +def test_list_notification_channels_pager(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + notification.NotificationChannel(), + notification.NotificationChannel(), + ], + next_page_token='abc', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[], + next_page_token='def', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + ], + next_page_token='ghi', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + notification.NotificationChannel(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.list_notification_channels(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, notification.NotificationChannel) + for i in results) + +def test_list_notification_channels_pages(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + notification.NotificationChannel(), + notification.NotificationChannel(), + ], + next_page_token='abc', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[], + next_page_token='def', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + ], + next_page_token='ghi', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + notification.NotificationChannel(), + ], + ), + RuntimeError, + ) + pages = list(client.list_notification_channels(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_notification_channels_async_pager(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + notification.NotificationChannel(), + notification.NotificationChannel(), + ], + next_page_token='abc', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[], + next_page_token='def', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + ], + next_page_token='ghi', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + notification.NotificationChannel(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_notification_channels(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, notification.NotificationChannel) + for i in responses) + +@pytest.mark.asyncio +async def test_list_notification_channels_async_pages(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notification_channels), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + notification.NotificationChannel(), + notification.NotificationChannel(), + ], + next_page_token='abc', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[], + next_page_token='def', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + ], + next_page_token='ghi', + ), + notification_service.ListNotificationChannelsResponse( + notification_channels=[ + notification.NotificationChannel(), + notification.NotificationChannel(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_notification_channels(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_notification_channel(transport: str = 'grpc', request_type=notification_service.GetNotificationChannelRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel( + type_='type__value', + name='name_value', + display_name='display_name_value', + description='description_value', + verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, + ) + response = client.get_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.GetNotificationChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification.NotificationChannel) + assert response.type_ == 'type__value' + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED + + +def test_get_notification_channel_from_dict(): + test_get_notification_channel(request_type=dict) + + +def test_get_notification_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_notification_channel), + '__call__') as call: + client.get_notification_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.GetNotificationChannelRequest() + + +@pytest.mark.asyncio +async def test_get_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.GetNotificationChannelRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel( + type_='type__value', + name='name_value', + display_name='display_name_value', + description='description_value', + verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, + )) + response = await client.get_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.GetNotificationChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification.NotificationChannel) + assert response.type_ == 'type__value' + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED + + +@pytest.mark.asyncio +async def test_get_notification_channel_async_from_dict(): + await test_get_notification_channel_async(request_type=dict) + + +def test_get_notification_channel_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.GetNotificationChannelRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel), + '__call__') as call: + call.return_value = notification.NotificationChannel() + client.get_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_notification_channel_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.GetNotificationChannelRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_notification_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) + await client.get_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_notification_channel_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_notification_channel( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_notification_channel_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_notification_channel( + notification_service.GetNotificationChannelRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_notification_channel_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_notification_channel( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_notification_channel_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_notification_channel( + notification_service.GetNotificationChannelRequest(), + name='name_value', + ) + + +def test_create_notification_channel(transport: str = 'grpc', request_type=notification_service.CreateNotificationChannelRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel( + type_='type__value', + name='name_value', + display_name='display_name_value', + description='description_value', + verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, + ) + response = client.create_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.CreateNotificationChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification.NotificationChannel) + assert response.type_ == 'type__value' + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED + + +def test_create_notification_channel_from_dict(): + test_create_notification_channel(request_type=dict) + + +def test_create_notification_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_notification_channel), + '__call__') as call: + client.create_notification_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.CreateNotificationChannelRequest() + + +@pytest.mark.asyncio +async def test_create_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.CreateNotificationChannelRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. 
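+ # A fully populated channel is designated so every response field can be
+ # asserted on the awaited result below.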
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel( + type_='type__value', + name='name_value', + display_name='display_name_value', + description='description_value', + verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, + )) + response = await client.create_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.CreateNotificationChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification.NotificationChannel) + assert response.type_ == 'type__value' + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED + + +@pytest.mark.asyncio +async def test_create_notification_channel_async_from_dict(): + await test_create_notification_channel_async(request_type=dict) + + +def test_create_notification_channel_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.CreateNotificationChannelRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_notification_channel), + '__call__') as call: + call.return_value = notification.NotificationChannel() + client.create_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_notification_channel_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.CreateNotificationChannelRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_notification_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) + await client.create_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_notification_channel_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
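+ # The flattened name and notification_channel keywords are folded into a
+ # CreateNotificationChannelRequest by the client before the stub is invoked.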
+ with mock.patch.object( + type(client.transport.create_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_notification_channel( + name='name_value', + notification_channel=notification.NotificationChannel(type_='type__value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].notification_channel == notification.NotificationChannel(type_='type__value') + + +def test_create_notification_channel_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_notification_channel( + notification_service.CreateNotificationChannelRequest(), + name='name_value', + notification_channel=notification.NotificationChannel(type_='type__value'), + ) + + +@pytest.mark.asyncio +async def test_create_notification_channel_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_notification_channel( + name='name_value', + notification_channel=notification.NotificationChannel(type_='type__value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].notification_channel == notification.NotificationChannel(type_='type__value') + + +@pytest.mark.asyncio +async def test_create_notification_channel_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_notification_channel( + notification_service.CreateNotificationChannelRequest(), + name='name_value', + notification_channel=notification.NotificationChannel(type_='type__value'), + ) + + +def test_update_notification_channel(transport: str = 'grpc', request_type=notification_service.UpdateNotificationChannelRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
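+ # The synchronous transport returns the proto message directly, so no
+ # awaitable wrapper is needed here.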
+ with mock.patch.object( + type(client.transport.update_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel( + type_='type__value', + name='name_value', + display_name='display_name_value', + description='description_value', + verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, + ) + response = client.update_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.UpdateNotificationChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification.NotificationChannel) + assert response.type_ == 'type__value' + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED + + +def test_update_notification_channel_from_dict(): + test_update_notification_channel(request_type=dict) + + +def test_update_notification_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_notification_channel), + '__call__') as call: + client.update_notification_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.UpdateNotificationChannelRequest() + + +@pytest.mark.asyncio +async def test_update_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.UpdateNotificationChannelRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel( + type_='type__value', + name='name_value', + display_name='display_name_value', + description='description_value', + verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, + )) + response = await client.update_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.UpdateNotificationChannelRequest() + + # Establish that the response is the type that we expect. 
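+ # The channel's `type` field surfaces as `type_` on the proto-plus wrapper
+ # because `type` would shadow the Python builtin.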
+ assert isinstance(response, notification.NotificationChannel) + assert response.type_ == 'type__value' + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED + + +@pytest.mark.asyncio +async def test_update_notification_channel_async_from_dict(): + await test_update_notification_channel_async(request_type=dict) + + +def test_update_notification_channel_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.UpdateNotificationChannelRequest() + + request.notification_channel.name = 'notification_channel.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_notification_channel), + '__call__') as call: + call.return_value = notification.NotificationChannel() + client.update_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'notification_channel.name=notification_channel.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_notification_channel_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.UpdateNotificationChannelRequest() + + request.notification_channel.name = 'notification_channel.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_notification_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) + await client.update_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'notification_channel.name=notification_channel.name/value', + ) in kw['metadata'] + + +def test_update_notification_channel_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_notification_channel( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + notification_channel=notification.NotificationChannel(type_='type__value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
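+ # update_mask selects which channel fields are written; a real caller might
+ # pass field_mask_pb2.FieldMask(paths=['display_name']) (illustrative path only).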
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + assert args[0].notification_channel == notification.NotificationChannel(type_='type__value') + + +def test_update_notification_channel_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_notification_channel( + notification_service.UpdateNotificationChannelRequest(), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + notification_channel=notification.NotificationChannel(type_='type__value'), + ) + + +@pytest.mark.asyncio +async def test_update_notification_channel_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_notification_channel( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + notification_channel=notification.NotificationChannel(type_='type__value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + assert args[0].notification_channel == notification.NotificationChannel(type_='type__value') + + +@pytest.mark.asyncio +async def test_update_notification_channel_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_notification_channel( + notification_service.UpdateNotificationChannelRequest(), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + notification_channel=notification.NotificationChannel(type_='type__value'), + ) + + +def test_delete_notification_channel(transport: str = 'grpc', request_type=notification_service.DeleteNotificationChannelRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. 
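+ # DeleteNotificationChannel carries no response payload, so the client
+ # returns None.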
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.DeleteNotificationChannelRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_notification_channel_from_dict(): + test_delete_notification_channel(request_type=dict) + + +def test_delete_notification_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_notification_channel), + '__call__') as call: + client.delete_notification_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.DeleteNotificationChannelRequest() + + +@pytest.mark.asyncio +async def test_delete_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.DeleteNotificationChannelRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.DeleteNotificationChannelRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_notification_channel_async_from_dict(): + await test_delete_notification_channel_async(request_type=dict) + + +def test_delete_notification_channel_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.DeleteNotificationChannelRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_notification_channel), + '__call__') as call: + call.return_value = None + client.delete_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_notification_channel_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = notification_service.DeleteNotificationChannelRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_notification_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_notification_channel_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_notification_channel( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].force == True + + +def test_delete_notification_channel_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_notification_channel( + notification_service.DeleteNotificationChannelRequest(), + name='name_value', + force=True, + ) + + +@pytest.mark.asyncio +async def test_delete_notification_channel_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_notification_channel( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].force == True + + +@pytest.mark.asyncio +async def test_delete_notification_channel_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_notification_channel( + notification_service.DeleteNotificationChannelRequest(), + name='name_value', + force=True, + ) + + +def test_send_notification_channel_verification_code(transport: str = 'grpc', request_type=notification_service.SendNotificationChannelVerificationCodeRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.send_notification_channel_verification_code), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.send_notification_channel_verification_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.SendNotificationChannelVerificationCodeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_send_notification_channel_verification_code_from_dict(): + test_send_notification_channel_verification_code(request_type=dict) + + +def test_send_notification_channel_verification_code_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.send_notification_channel_verification_code), + '__call__') as call: + client.send_notification_channel_verification_code() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.SendNotificationChannelVerificationCodeRequest() + + +@pytest.mark.asyncio +async def test_send_notification_channel_verification_code_async(transport: str = 'grpc_asyncio', request_type=notification_service.SendNotificationChannelVerificationCodeRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.send_notification_channel_verification_code), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.send_notification_channel_verification_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.SendNotificationChannelVerificationCodeRequest() + + # Establish that the response is the type that we expect. 
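+ # Sending a verification code likewise has no response payload; the awaited
+ # result is None.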
+ assert response is None + + +@pytest.mark.asyncio +async def test_send_notification_channel_verification_code_async_from_dict(): + await test_send_notification_channel_verification_code_async(request_type=dict) + + +def test_send_notification_channel_verification_code_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.SendNotificationChannelVerificationCodeRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.send_notification_channel_verification_code), + '__call__') as call: + call.return_value = None + client.send_notification_channel_verification_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_send_notification_channel_verification_code_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.SendNotificationChannelVerificationCodeRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.send_notification_channel_verification_code), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.send_notification_channel_verification_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_send_notification_channel_verification_code_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.send_notification_channel_verification_code), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.send_notification_channel_verification_code( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_send_notification_channel_verification_code_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.send_notification_channel_verification_code( + notification_service.SendNotificationChannelVerificationCodeRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_send_notification_channel_verification_code_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.send_notification_channel_verification_code), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.send_notification_channel_verification_code( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_send_notification_channel_verification_code_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.send_notification_channel_verification_code( + notification_service.SendNotificationChannelVerificationCodeRequest(), + name='name_value', + ) + + +def test_get_notification_channel_verification_code(transport: str = 'grpc', request_type=notification_service.GetNotificationChannelVerificationCodeRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_verification_code), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification_service.GetNotificationChannelVerificationCodeResponse( + code='code_value', + ) + response = client.get_notification_channel_verification_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.GetNotificationChannelVerificationCodeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification_service.GetNotificationChannelVerificationCodeResponse) + assert response.code == 'code_value' + + +def test_get_notification_channel_verification_code_from_dict(): + test_get_notification_channel_verification_code(request_type=dict) + + +def test_get_notification_channel_verification_code_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
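+ # With no request object and no flattened fields, the client still sends a
+ # default GetNotificationChannelVerificationCodeRequest.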
+ with mock.patch.object( + type(client.transport.get_notification_channel_verification_code), + '__call__') as call: + client.get_notification_channel_verification_code() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.GetNotificationChannelVerificationCodeRequest() + + +@pytest.mark.asyncio +async def test_get_notification_channel_verification_code_async(transport: str = 'grpc_asyncio', request_type=notification_service.GetNotificationChannelVerificationCodeRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_verification_code), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification_service.GetNotificationChannelVerificationCodeResponse( + code='code_value', + )) + response = await client.get_notification_channel_verification_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.GetNotificationChannelVerificationCodeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification_service.GetNotificationChannelVerificationCodeResponse) + assert response.code == 'code_value' + + +@pytest.mark.asyncio +async def test_get_notification_channel_verification_code_async_from_dict(): + await test_get_notification_channel_verification_code_async(request_type=dict) + + +def test_get_notification_channel_verification_code_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.GetNotificationChannelVerificationCodeRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_verification_code), + '__call__') as call: + call.return_value = notification_service.GetNotificationChannelVerificationCodeResponse() + client.get_notification_channel_verification_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_notification_channel_verification_code_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.GetNotificationChannelVerificationCodeRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
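+ # The request's name is expected to be mirrored into the x-goog-request-params
+ # routing header asserted below.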
+ with mock.patch.object( + type(client.transport.get_notification_channel_verification_code), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.GetNotificationChannelVerificationCodeResponse()) + await client.get_notification_channel_verification_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_notification_channel_verification_code_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_verification_code), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification_service.GetNotificationChannelVerificationCodeResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_notification_channel_verification_code( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_notification_channel_verification_code_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_notification_channel_verification_code( + notification_service.GetNotificationChannelVerificationCodeRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_notification_channel_verification_code_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_notification_channel_verification_code), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification_service.GetNotificationChannelVerificationCodeResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.GetNotificationChannelVerificationCodeResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_notification_channel_verification_code( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_notification_channel_verification_code_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_notification_channel_verification_code( + notification_service.GetNotificationChannelVerificationCodeRequest(), + name='name_value', + ) + + +def test_verify_notification_channel(transport: str = 'grpc', request_type=notification_service.VerifyNotificationChannelRequest): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel( + type_='type__value', + name='name_value', + display_name='display_name_value', + description='description_value', + verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, + ) + response = client.verify_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.VerifyNotificationChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification.NotificationChannel) + assert response.type_ == 'type__value' + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED + + +def test_verify_notification_channel_from_dict(): + test_verify_notification_channel(request_type=dict) + + +def test_verify_notification_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_notification_channel), + '__call__') as call: + client.verify_notification_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.VerifyNotificationChannelRequest() + + +@pytest.mark.asyncio +async def test_verify_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.VerifyNotificationChannelRequest): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel( + type_='type__value', + name='name_value', + display_name='display_name_value', + description='description_value', + verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, + )) + response = await client.verify_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == notification_service.VerifyNotificationChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, notification.NotificationChannel) + assert response.type_ == 'type__value' + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED + + +@pytest.mark.asyncio +async def test_verify_notification_channel_async_from_dict(): + await test_verify_notification_channel_async(request_type=dict) + + +def test_verify_notification_channel_field_headers(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.VerifyNotificationChannelRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_notification_channel), + '__call__') as call: + call.return_value = notification.NotificationChannel() + client.verify_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_verify_notification_channel_field_headers_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = notification_service.VerifyNotificationChannelRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_notification_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) + await client.verify_notification_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_verify_notification_channel_flattened(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
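+ # `code` is the verification code previously delivered to the channel; a real
+ # call might look like client.verify_notification_channel(name=..., code='123456')
+ # (illustrative value only).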
+ with mock.patch.object( + type(client.transport.verify_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.verify_notification_channel( + name='name_value', + code='code_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].code == 'code_value' + + +def test_verify_notification_channel_flattened_error(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.verify_notification_channel( + notification_service.VerifyNotificationChannelRequest(), + name='name_value', + code='code_value', + ) + + +@pytest.mark.asyncio +async def test_verify_notification_channel_flattened_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_notification_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = notification.NotificationChannel() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.verify_notification_channel( + name='name_value', + code='code_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].code == 'code_value' + + +@pytest.mark.asyncio +async def test_verify_notification_channel_flattened_error_async(): + client = NotificationChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.verify_notification_channel( + notification_service.VerifyNotificationChannelRequest(), + name='name_value', + code='code_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.NotificationChannelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.NotificationChannelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NotificationChannelServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. 
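+ # client_options that influence transport construction (credentials file,
+ # scopes) conflict with a pre-built transport instance.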
+ transport = transports.NotificationChannelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NotificationChannelServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NotificationChannelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NotificationChannelServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.NotificationChannelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.NotificationChannelServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.NotificationChannelServiceGrpcTransport, + transports.NotificationChannelServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.NotificationChannelServiceGrpcTransport, + ) + +def test_notification_channel_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.NotificationChannelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_notification_channel_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.NotificationChannelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_notification_channel_descriptors', + 'get_notification_channel_descriptor', + 'list_notification_channels', + 'get_notification_channel', + 'create_notification_channel', + 'update_notification_channel', + 'delete_notification_channel', + 'send_notification_channel_verification_code', + 'get_notification_channel_verification_code', + 'verify_notification_channel', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_notification_channel_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NotificationChannelServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_notification_channel_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NotificationChannelServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ), + quota_project_id="octopus", + ) + + +def test_notification_channel_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NotificationChannelServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_notification_channel_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NotificationChannelServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_notification_channel_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NotificationChannelServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotificationChannelServiceGrpcTransport, + transports.NotificationChannelServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_notification_channel_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotificationChannelServiceGrpcTransport, + transports.NotificationChannelServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_notification_channel_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.NotificationChannelServiceGrpcTransport, grpc_helpers), + (transports.NotificationChannelServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_notification_channel_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "monitoring.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + scopes=["1", "2"], + default_host="monitoring.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.NotificationChannelServiceGrpcTransport, transports.NotificationChannelServiceGrpcAsyncIOTransport]) +def test_notification_channel_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_notification_channel_service_host_no_port(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), + ) + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_notification_channel_service_host_with_port(): + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), + ) + assert client.transport._host == 'monitoring.googleapis.com:8000' + +def test_notification_channel_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.NotificationChannelServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_notification_channel_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.NotificationChannelServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.NotificationChannelServiceGrpcTransport, transports.NotificationChannelServiceGrpcAsyncIOTransport]) +def test_notification_channel_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.NotificationChannelServiceGrpcTransport, transports.NotificationChannelServiceGrpcAsyncIOTransport]) +def test_notification_channel_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_notification_channel_path(): + project = "squid" + notification_channel = "clam" + expected = "projects/{project}/notificationChannels/{notification_channel}".format(project=project, notification_channel=notification_channel, ) + actual = NotificationChannelServiceClient.notification_channel_path(project, notification_channel) + assert expected == actual + + +def test_parse_notification_channel_path(): + expected = { + "project": "whelk", + "notification_channel": "octopus", + } + path = NotificationChannelServiceClient.notification_channel_path(**expected) + + # Check that the path construction is reversible. + actual = NotificationChannelServiceClient.parse_notification_channel_path(path) + assert expected == actual + +def test_notification_channel_descriptor_path(): + project = "oyster" + channel_descriptor = "nudibranch" + expected = "projects/{project}/notificationChannelDescriptors/{channel_descriptor}".format(project=project, channel_descriptor=channel_descriptor, ) + actual = NotificationChannelServiceClient.notification_channel_descriptor_path(project, channel_descriptor) + assert expected == actual + + +def test_parse_notification_channel_descriptor_path(): + expected = { + "project": "cuttlefish", + "channel_descriptor": "mussel", + } + path = NotificationChannelServiceClient.notification_channel_descriptor_path(**expected) + + # Check that the path construction is reversible. + actual = NotificationChannelServiceClient.parse_notification_channel_descriptor_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = NotificationChannelServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = NotificationChannelServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NotificationChannelServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder, ) + actual = NotificationChannelServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = NotificationChannelServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = NotificationChannelServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization, ) + actual = NotificationChannelServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = NotificationChannelServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = NotificationChannelServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project, ) + actual = NotificationChannelServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = NotificationChannelServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = NotificationChannelServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = NotificationChannelServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = NotificationChannelServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NotificationChannelServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.NotificationChannelServiceTransport, '_prep_wrapped_messages') as prep: + client = NotificationChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.NotificationChannelServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = NotificationChannelServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_query_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_query_service.py new file mode 100644 index 00000000..54868cab --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_query_service.py @@ -0,0 +1,1264 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.monitoring_v3.services.query_service import QueryServiceAsyncClient +from google.cloud.monitoring_v3.services.query_service import QueryServiceClient +from google.cloud.monitoring_v3.services.query_service import pagers +from google.cloud.monitoring_v3.services.query_service import transports +from google.cloud.monitoring_v3.services.query_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.monitoring_v3.types import metric +from google.cloud.monitoring_v3.types import metric_service +from google.oauth2 import service_account +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert QueryServiceClient._get_default_mtls_endpoint(None) is None + assert QueryServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert QueryServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert QueryServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert QueryServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert QueryServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + QueryServiceClient, + QueryServiceAsyncClient, +]) +def test_query_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.QueryServiceGrpcTransport, "grpc"), + (transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_query_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + QueryServiceClient, + QueryServiceAsyncClient, +]) +def test_query_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds 
+ assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_query_service_client_get_transport_class(): + transport = QueryServiceClient.get_transport_class() + available_transports = [ + transports.QueryServiceGrpcTransport, + ] + assert transport in available_transports + + transport = QueryServiceClient.get_transport_class("grpc") + assert transport == transports.QueryServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc"), + (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(QueryServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QueryServiceClient)) +@mock.patch.object(QueryServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QueryServiceAsyncClient)) +def test_query_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(QueryServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(QueryServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc", "true"), + (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc", "false"), + (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(QueryServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QueryServiceClient)) +@mock.patch.object(QueryServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QueryServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_query_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc"), + (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_query_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc"), + (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_query_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_query_service_client_client_options_from_dict(): + with mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = QueryServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_query_time_series(transport: str = 'grpc', request_type=metric_service.QueryTimeSeriesRequest): + client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metric_service.QueryTimeSeriesResponse( + next_page_token='next_page_token_value', + ) + response = client.query_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.QueryTimeSeriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.QueryTimeSeriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_query_time_series_from_dict(): + test_query_time_series(request_type=dict) + + +def test_query_time_series_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_time_series), + '__call__') as call: + client.query_time_series() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.QueryTimeSeriesRequest() + + +@pytest.mark.asyncio +async def test_query_time_series_async(transport: str = 'grpc_asyncio', request_type=metric_service.QueryTimeSeriesRequest): + client = QueryServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_time_series), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_service.QueryTimeSeriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.query_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metric_service.QueryTimeSeriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.QueryTimeSeriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_query_time_series_async_from_dict(): + await test_query_time_series_async(request_type=dict) + + +def test_query_time_series_field_headers(): + client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.QueryTimeSeriesRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_time_series), + '__call__') as call: + call.return_value = metric_service.QueryTimeSeriesResponse() + client.query_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_query_time_series_field_headers_async(): + client = QueryServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metric_service.QueryTimeSeriesRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_time_series), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.QueryTimeSeriesResponse()) + await client.query_time_series(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_query_time_series_pager(): + client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_time_series), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + metric.TimeSeriesData(), + metric.TimeSeriesData(), + ], + next_page_token='abc', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[], + next_page_token='def', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + ], + next_page_token='ghi', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + metric.TimeSeriesData(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.query_time_series(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, metric.TimeSeriesData) + for i in results) + +def test_query_time_series_pages(): + client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_time_series), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + metric.TimeSeriesData(), + metric.TimeSeriesData(), + ], + next_page_token='abc', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[], + next_page_token='def', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + ], + next_page_token='ghi', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + metric.TimeSeriesData(), + ], + ), + RuntimeError, + ) + pages = list(client.query_time_series(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_query_time_series_async_pager(): + client = QueryServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_time_series), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + metric.TimeSeriesData(), + metric.TimeSeriesData(), + ], + next_page_token='abc', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[], + next_page_token='def', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + ], + next_page_token='ghi', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + metric.TimeSeriesData(), + ], + ), + RuntimeError, + ) + async_pager = await client.query_time_series(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metric.TimeSeriesData) + for i in responses) + +@pytest.mark.asyncio +async def test_query_time_series_async_pages(): + client = QueryServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_time_series), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + metric.TimeSeriesData(), + metric.TimeSeriesData(), + ], + next_page_token='abc', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[], + next_page_token='def', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + ], + next_page_token='ghi', + ), + metric_service.QueryTimeSeriesResponse( + time_series_data=[ + metric.TimeSeriesData(), + metric.TimeSeriesData(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.query_time_series(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.QueryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.QueryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = QueryServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.QueryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = QueryServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.QueryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = QueryServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.QueryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.QueryServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.QueryServiceGrpcTransport, + transports.QueryServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.QueryServiceGrpcTransport, + ) + +def test_query_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.QueryServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_query_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.QueryServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'query_time_series', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_query_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.QueryServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_query_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.QueryServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ), + quota_project_id="octopus", + ) + + +def test_query_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.QueryServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_query_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + QueryServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_query_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + QueryServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.QueryServiceGrpcTransport, + transports.QueryServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_query_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.QueryServiceGrpcTransport, + transports.QueryServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_query_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.QueryServiceGrpcTransport, grpc_helpers), + (transports.QueryServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_query_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "monitoring.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + scopes=["1", "2"], + default_host="monitoring.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.QueryServiceGrpcTransport, transports.QueryServiceGrpcAsyncIOTransport]) +def test_query_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_query_service_host_no_port(): + client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), + ) + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_query_service_host_with_port(): + client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), + ) + assert client.transport._host == 'monitoring.googleapis.com:8000' + +def test_query_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.QueryServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_query_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.QueryServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.QueryServiceGrpcTransport, transports.QueryServiceGrpcAsyncIOTransport]) +def test_query_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.QueryServiceGrpcTransport, transports.QueryServiceGrpcAsyncIOTransport]) +def test_query_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = QueryServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = QueryServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = QueryServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = QueryServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = QueryServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = QueryServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = QueryServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = QueryServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = QueryServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = QueryServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = QueryServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
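+    # common_project_path("mussel") renders "projects/mussel"; parsing that
+    # path back should recover the original {"project": "mussel"} mapping.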
+ actual = QueryServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = QueryServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = QueryServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = QueryServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.QueryServiceTransport, '_prep_wrapped_messages') as prep: + client = QueryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.QueryServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = QueryServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py new file mode 100644 index 00000000..e1cbc2e9 --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py @@ -0,0 +1,3703 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.monitoring_v3.services.service_monitoring_service import ServiceMonitoringServiceAsyncClient +from google.cloud.monitoring_v3.services.service_monitoring_service import ServiceMonitoringServiceClient +from google.cloud.monitoring_v3.services.service_monitoring_service import pagers +from google.cloud.monitoring_v3.services.service_monitoring_service import transports +from google.cloud.monitoring_v3.services.service_monitoring_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.monitoring_v3.types import service +from google.cloud.monitoring_v3.types import service as gm_service +from google.cloud.monitoring_v3.types import service_service +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.type import calendar_period_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(None) is None + assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + ServiceMonitoringServiceClient, + ServiceMonitoringServiceAsyncClient, +]) +def test_service_monitoring_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ServiceMonitoringServiceGrpcTransport, "grpc"), + (transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_service_monitoring_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + ServiceMonitoringServiceClient, + ServiceMonitoringServiceAsyncClient, +]) +def test_service_monitoring_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_service_monitoring_service_client_get_transport_class(): + transport = ServiceMonitoringServiceClient.get_transport_class() + available_transports = [ + transports.ServiceMonitoringServiceGrpcTransport, + ] + 
assert transport in available_transports + + transport = ServiceMonitoringServiceClient.get_transport_class("grpc") + assert transport == transports.ServiceMonitoringServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc"), + (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(ServiceMonitoringServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceMonitoringServiceClient)) +@mock.patch.object(ServiceMonitoringServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceMonitoringServiceAsyncClient)) +def test_service_monitoring_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ServiceMonitoringServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ServiceMonitoringServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
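+    # (Only "true" and "false" are accepted; any other value makes the client
+    # constructor raise ValueError, as asserted below.)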
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc", "true"), + (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc", "false"), + (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(ServiceMonitoringServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceMonitoringServiceClient)) +@mock.patch.object(ServiceMonitoringServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceMonitoringServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_service_monitoring_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
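+    # (Here the certificate is discovered through the mocked
+    # google.auth.transport.mtls.default_client_cert_source helper rather than
+    # being passed in via client_options.)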
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc"), + (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_service_monitoring_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc"), + (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_service_monitoring_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
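+    # Outside of these mocked tests this corresponds to pointing the client at
+    # a service-account key through client options, roughly (illustrative
+    # sketch; the file path is a placeholder):
+    #
+    #     client = ServiceMonitoringServiceClient(
+    #         client_options={"credentials_file": "path/to/key.json"},
+    #     )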
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_service_monitoring_service_client_client_options_from_dict(): + with mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = ServiceMonitoringServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_create_service(transport: str = 'grpc', request_type=service_service.CreateServiceRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_service.Service( + name='name_value', + display_name='display_name_value', + custom=None, + ) + response = client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.CreateServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gm_service.Service) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +def test_create_service_from_dict(): + test_create_service(request_type=dict) + + +def test_create_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + client.create_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.CreateServiceRequest() + + +@pytest.mark.asyncio +async def test_create_service_async(transport: str = 'grpc_asyncio', request_type=service_service.CreateServiceRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
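+    # A non-mocked request would typically carry a parent and a Service, e.g.
+    # roughly (illustrative sketch, placeholder values):
+    #
+    #     service_service.CreateServiceRequest(
+    #         parent="projects/my-project",
+    #         service=gm_service.Service(display_name="My service"),
+    #     )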
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service( + name='name_value', + display_name='display_name_value', + )) + response = await client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.CreateServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gm_service.Service) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.asyncio +async def test_create_service_async_from_dict(): + await test_create_service_async(request_type=dict) + + +def test_create_service_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.CreateServiceRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + call.return_value = gm_service.Service() + client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_service_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.CreateServiceRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service()) + await client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_service_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_service.Service() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
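+        # The flattened parent/service keyword arguments are folded into a
+        # single CreateServiceRequest before the transport is invoked, which
+        # the assertions below rely on.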
+ client.create_service( + parent='parent_value', + service=gm_service.Service(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].service == gm_service.Service(name='name_value') + + +def test_create_service_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_service( + service_service.CreateServiceRequest(), + parent='parent_value', + service=gm_service.Service(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_service_flattened_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_service.Service() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_service( + parent='parent_value', + service=gm_service.Service(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].service == gm_service.Service(name='name_value') + + +@pytest.mark.asyncio +async def test_create_service_flattened_error_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_service( + service_service.CreateServiceRequest(), + parent='parent_value', + service=gm_service.Service(name='name_value'), + ) + + +def test_get_service(transport: str = 'grpc', request_type=service_service.GetServiceRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.Service( + name='name_value', + display_name='display_name_value', + custom=None, + ) + response = client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.GetServiceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.Service) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +def test_get_service_from_dict(): + test_get_service(request_type=dict) + + +def test_get_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + client.get_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.GetServiceRequest() + + +@pytest.mark.asyncio +async def test_get_service_async(transport: str = 'grpc_asyncio', request_type=service_service.GetServiceRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.Service( + name='name_value', + display_name='display_name_value', + )) + response = await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.GetServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Service) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.asyncio +async def test_get_service_async_from_dict(): + await test_get_service_async(request_type=dict) + + +def test_get_service_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.GetServiceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value = service.Service() + client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_service_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.GetServiceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Service()) + await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_service_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.Service() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_service( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_service_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_service( + service_service.GetServiceRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_service_flattened_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.Service() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Service()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_service( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_service_flattened_error_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_service( + service_service.GetServiceRequest(), + name='name_value', + ) + + +def test_list_services(transport: str = 'grpc', request_type=service_service.ListServicesRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. 
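+        # list_services wraps the raw ListServicesResponse in a
+        # ListServicesPager, so only the next_page_token matters for this
+        # assertion-level test.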
+ call.return_value = service_service.ListServicesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.ListServicesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServicesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_services_from_dict(): + test_list_services(request_type=dict) + + +def test_list_services_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + client.list_services() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.ListServicesRequest() + + +@pytest.mark.asyncio +async def test_list_services_async(transport: str = 'grpc_asyncio', request_type=service_service.ListServicesRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServicesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.ListServicesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServicesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_services_async_from_dict(): + await test_list_services_async(request_type=dict) + + +def test_list_services_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.ListServicesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + call.return_value = service_service.ListServicesResponse() + client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
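+    # The client mirrors request.parent into the x-goog-request-params
+    # routing header so the backend can route the call correctly.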
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_services_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.ListServicesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServicesResponse()) + await client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_services_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_service.ListServicesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_services( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_services_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_services( + service_service.ListServicesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_services_flattened_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_service.ListServicesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServicesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_services( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_services_flattened_error_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_services( + service_service.ListServicesRequest(), + parent='parent_value', + ) + + +def test_list_services_pager(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service_service.ListServicesResponse( + services=[ + service.Service(), + service.Service(), + service.Service(), + ], + next_page_token='abc', + ), + service_service.ListServicesResponse( + services=[], + next_page_token='def', + ), + service_service.ListServicesResponse( + services=[ + service.Service(), + ], + next_page_token='ghi', + ), + service_service.ListServicesResponse( + services=[ + service.Service(), + service.Service(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_services(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, service.Service) + for i in results) + +def test_list_services_pages(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service_service.ListServicesResponse( + services=[ + service.Service(), + service.Service(), + service.Service(), + ], + next_page_token='abc', + ), + service_service.ListServicesResponse( + services=[], + next_page_token='def', + ), + service_service.ListServicesResponse( + services=[ + service.Service(), + ], + next_page_token='ghi', + ), + service_service.ListServicesResponse( + services=[ + service.Service(), + service.Service(), + ], + ), + RuntimeError, + ) + pages = list(client.list_services(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_services_async_pager(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
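+        # Four fake pages (3 + 0 + 1 + 2 services) mirror the synchronous
+        # pager test above; the trailing RuntimeError guards against the
+        # pager fetching past the final page.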
+ call.side_effect = ( + service_service.ListServicesResponse( + services=[ + service.Service(), + service.Service(), + service.Service(), + ], + next_page_token='abc', + ), + service_service.ListServicesResponse( + services=[], + next_page_token='def', + ), + service_service.ListServicesResponse( + services=[ + service.Service(), + ], + next_page_token='ghi', + ), + service_service.ListServicesResponse( + services=[ + service.Service(), + service.Service(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_services(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, service.Service) + for i in responses) + +@pytest.mark.asyncio +async def test_list_services_async_pages(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service_service.ListServicesResponse( + services=[ + service.Service(), + service.Service(), + service.Service(), + ], + next_page_token='abc', + ), + service_service.ListServicesResponse( + services=[], + next_page_token='def', + ), + service_service.ListServicesResponse( + services=[ + service.Service(), + ], + next_page_token='ghi', + ), + service_service.ListServicesResponse( + services=[ + service.Service(), + service.Service(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_services(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_update_service(transport: str = 'grpc', request_type=service_service.UpdateServiceRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_service.Service( + name='name_value', + display_name='display_name_value', + custom=None, + ) + response = client.update_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.UpdateServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gm_service.Service) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +def test_update_service_from_dict(): + test_update_service(request_type=dict) + + +def test_update_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
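+    # In real use an update carries the modified Service and usually a
+    # FieldMask, e.g. roughly (illustrative sketch, placeholder values):
+    #
+    #     service_service.UpdateServiceRequest(
+    #         service=gm_service.Service(name="projects/p/services/s"),
+    #         update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+    #     )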
+ with mock.patch.object( + type(client.transport.update_service), + '__call__') as call: + client.update_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.UpdateServiceRequest() + + +@pytest.mark.asyncio +async def test_update_service_async(transport: str = 'grpc_asyncio', request_type=service_service.UpdateServiceRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service( + name='name_value', + display_name='display_name_value', + )) + response = await client.update_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.UpdateServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gm_service.Service) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.asyncio +async def test_update_service_async_from_dict(): + await test_update_service_async(request_type=dict) + + +def test_update_service_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.UpdateServiceRequest() + + request.service.name = 'service.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service), + '__call__') as call: + call.return_value = gm_service.Service() + client.update_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service.name=service.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_service_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.UpdateServiceRequest() + + request.service.name = 'service.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service()) + await client.update_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
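+    # For update_service the routing parameter is taken from the nested
+    # service.name field rather than from a top-level request field.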
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service.name=service.name/value', + ) in kw['metadata'] + + +def test_update_service_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_service.Service() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_service( + service=gm_service.Service(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].service == gm_service.Service(name='name_value') + + +def test_update_service_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_service( + service_service.UpdateServiceRequest(), + service=gm_service.Service(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_update_service_flattened_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gm_service.Service() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_service( + service=gm_service.Service(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].service == gm_service.Service(name='name_value') + + +@pytest.mark.asyncio +async def test_update_service_flattened_error_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_service( + service_service.UpdateServiceRequest(), + service=gm_service.Service(name='name_value'), + ) + + +def test_delete_service(transport: str = 'grpc', request_type=service_service.DeleteServiceRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.DeleteServiceRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_service_from_dict(): + test_delete_service(request_type=dict) + + +def test_delete_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + client.delete_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.DeleteServiceRequest() + + +@pytest.mark.asyncio +async def test_delete_service_async(transport: str = 'grpc_asyncio', request_type=service_service.DeleteServiceRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.DeleteServiceRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_service_async_from_dict(): + await test_delete_service_async(request_type=dict) + + +def test_delete_service_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.DeleteServiceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + call.return_value = None + client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_service_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.DeleteServiceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_service_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_service( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_service_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_service( + service_service.DeleteServiceRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_service_flattened_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_service( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_service_flattened_error_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_service( + service_service.DeleteServiceRequest(), + name='name_value', + ) + + +def test_create_service_level_objective(transport: str = 'grpc', request_type=service_service.CreateServiceLevelObjectiveRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ServiceLevelObjective( + name='name_value', + display_name='display_name_value', + goal=0.419, + rolling_period=duration_pb2.Duration(seconds=751), + ) + response = client.create_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.CreateServiceLevelObjectiveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ServiceLevelObjective) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert math.isclose(response.goal, 0.419, rel_tol=1e-6) + + +def test_create_service_level_objective_from_dict(): + test_create_service_level_objective(request_type=dict) + + +def test_create_service_level_objective_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_level_objective), + '__call__') as call: + client.create_service_level_objective() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.CreateServiceLevelObjectiveRequest() + + +@pytest.mark.asyncio +async def test_create_service_level_objective_async(transport: str = 'grpc_asyncio', request_type=service_service.CreateServiceLevelObjectiveRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective( + name='name_value', + display_name='display_name_value', + goal=0.419, + )) + response = await client.create_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.CreateServiceLevelObjectiveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ServiceLevelObjective) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert math.isclose(response.goal, 0.419, rel_tol=1e-6) + + +@pytest.mark.asyncio +async def test_create_service_level_objective_async_from_dict(): + await test_create_service_level_objective_async(request_type=dict) + + +def test_create_service_level_objective_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service_service.CreateServiceLevelObjectiveRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_level_objective), + '__call__') as call: + call.return_value = service.ServiceLevelObjective() + client.create_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_service_level_objective_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.CreateServiceLevelObjectiveRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_level_objective), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) + await client.create_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_service_level_objective_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ServiceLevelObjective() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_service_level_objective( + parent='parent_value', + service_level_objective=service.ServiceLevelObjective(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].service_level_objective == service.ServiceLevelObjective(name='name_value') + + +def test_create_service_level_objective_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_service_level_objective( + service_service.CreateServiceLevelObjectiveRequest(), + parent='parent_value', + service_level_objective=service.ServiceLevelObjective(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_service_level_objective_flattened_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ServiceLevelObjective() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_service_level_objective( + parent='parent_value', + service_level_objective=service.ServiceLevelObjective(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].service_level_objective == service.ServiceLevelObjective(name='name_value') + + +@pytest.mark.asyncio +async def test_create_service_level_objective_flattened_error_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_service_level_objective( + service_service.CreateServiceLevelObjectiveRequest(), + parent='parent_value', + service_level_objective=service.ServiceLevelObjective(name='name_value'), + ) + + +def test_get_service_level_objective(transport: str = 'grpc', request_type=service_service.GetServiceLevelObjectiveRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ServiceLevelObjective( + name='name_value', + display_name='display_name_value', + goal=0.419, + rolling_period=duration_pb2.Duration(seconds=751), + ) + response = client.get_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.GetServiceLevelObjectiveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ServiceLevelObjective) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert math.isclose(response.goal, 0.419, rel_tol=1e-6) + + +def test_get_service_level_objective_from_dict(): + test_get_service_level_objective(request_type=dict) + + +def test_get_service_level_objective_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_service_level_objective), + '__call__') as call: + client.get_service_level_objective() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.GetServiceLevelObjectiveRequest() + + +@pytest.mark.asyncio +async def test_get_service_level_objective_async(transport: str = 'grpc_asyncio', request_type=service_service.GetServiceLevelObjectiveRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective( + name='name_value', + display_name='display_name_value', + goal=0.419, + )) + response = await client.get_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.GetServiceLevelObjectiveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ServiceLevelObjective) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert math.isclose(response.goal, 0.419, rel_tol=1e-6) + + +@pytest.mark.asyncio +async def test_get_service_level_objective_async_from_dict(): + await test_get_service_level_objective_async(request_type=dict) + + +def test_get_service_level_objective_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.GetServiceLevelObjectiveRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_level_objective), + '__call__') as call: + call.return_value = service.ServiceLevelObjective() + client.get_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_service_level_objective_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.GetServiceLevelObjectiveRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_service_level_objective), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) + await client.get_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_service_level_objective_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ServiceLevelObjective() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_service_level_objective( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_service_level_objective_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_service_level_objective( + service_service.GetServiceLevelObjectiveRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_service_level_objective_flattened_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ServiceLevelObjective() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_service_level_objective( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_service_level_objective_flattened_error_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_service_level_objective( + service_service.GetServiceLevelObjectiveRequest(), + name='name_value', + ) + + +def test_list_service_level_objectives(transport: str = 'grpc', request_type=service_service.ListServiceLevelObjectivesRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_level_objectives), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_service.ListServiceLevelObjectivesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_service_level_objectives(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.ListServiceLevelObjectivesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServiceLevelObjectivesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_service_level_objectives_from_dict(): + test_list_service_level_objectives(request_type=dict) + + +def test_list_service_level_objectives_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_level_objectives), + '__call__') as call: + client.list_service_level_objectives() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.ListServiceLevelObjectivesRequest() + + +@pytest.mark.asyncio +async def test_list_service_level_objectives_async(transport: str = 'grpc_asyncio', request_type=service_service.ListServiceLevelObjectivesRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_level_objectives), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServiceLevelObjectivesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_service_level_objectives(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.ListServiceLevelObjectivesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListServiceLevelObjectivesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_service_level_objectives_async_from_dict(): + await test_list_service_level_objectives_async(request_type=dict) + + +def test_list_service_level_objectives_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.ListServiceLevelObjectivesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_level_objectives), + '__call__') as call: + call.return_value = service_service.ListServiceLevelObjectivesResponse() + client.list_service_level_objectives(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_service_level_objectives_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.ListServiceLevelObjectivesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_level_objectives), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServiceLevelObjectivesResponse()) + await client.list_service_level_objectives(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_service_level_objectives_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_level_objectives), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_service.ListServiceLevelObjectivesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_service_level_objectives( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_service_level_objectives_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_service_level_objectives(
+            service_service.ListServiceLevelObjectivesRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_service_level_objectives_flattened_async():
+    client = ServiceMonitoringServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_service_level_objectives),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServiceLevelObjectivesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_service_level_objectives(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_service_level_objectives_flattened_error_async():
+    client = ServiceMonitoringServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_service_level_objectives(
+            service_service.ListServiceLevelObjectivesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_service_level_objectives_pager():
+    client = ServiceMonitoringServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_service_level_objectives),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[
+                    service.ServiceLevelObjective(),
+                    service.ServiceLevelObjective(),
+                    service.ServiceLevelObjective(),
+                ],
+                next_page_token='abc',
+            ),
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[],
+                next_page_token='def',
+            ),
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[
+                    service.ServiceLevelObjective(),
+                ],
+                next_page_token='ghi',
+            ),
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[
+                    service.ServiceLevelObjective(),
+                    service.ServiceLevelObjective(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_service_level_objectives(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, service.ServiceLevelObjective)
+                   for i in results)
+
+def test_list_service_level_objectives_pages():
+    client = ServiceMonitoringServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_service_level_objectives),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[
+                    service.ServiceLevelObjective(),
+                    service.ServiceLevelObjective(),
+                    service.ServiceLevelObjective(),
+                ],
+                next_page_token='abc',
+            ),
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[],
+                next_page_token='def',
+            ),
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[
+                    service.ServiceLevelObjective(),
+                ],
+                next_page_token='ghi',
+            ),
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[
+                    service.ServiceLevelObjective(),
+                    service.ServiceLevelObjective(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_service_level_objectives(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_service_level_objectives_async_pager():
+    client = ServiceMonitoringServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_service_level_objectives),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[
+                    service.ServiceLevelObjective(),
+                    service.ServiceLevelObjective(),
+                    service.ServiceLevelObjective(),
+                ],
+                next_page_token='abc',
+            ),
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[],
+                next_page_token='def',
+            ),
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[
+                    service.ServiceLevelObjective(),
+                ],
+                next_page_token='ghi',
+            ),
+            service_service.ListServiceLevelObjectivesResponse(
+                service_level_objectives=[
+                    service.ServiceLevelObjective(),
+                    service.ServiceLevelObjective(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_service_level_objectives(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, service.ServiceLevelObjective)
+                   for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_service_level_objectives_async_pages():
+    client = ServiceMonitoringServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_service_level_objectives),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + service_service.ListServiceLevelObjectivesResponse( + service_level_objectives=[ + service.ServiceLevelObjective(), + service.ServiceLevelObjective(), + service.ServiceLevelObjective(), + ], + next_page_token='abc', + ), + service_service.ListServiceLevelObjectivesResponse( + service_level_objectives=[], + next_page_token='def', + ), + service_service.ListServiceLevelObjectivesResponse( + service_level_objectives=[ + service.ServiceLevelObjective(), + ], + next_page_token='ghi', + ), + service_service.ListServiceLevelObjectivesResponse( + service_level_objectives=[ + service.ServiceLevelObjective(), + service.ServiceLevelObjective(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_service_level_objectives(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_update_service_level_objective(transport: str = 'grpc', request_type=service_service.UpdateServiceLevelObjectiveRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ServiceLevelObjective( + name='name_value', + display_name='display_name_value', + goal=0.419, + rolling_period=duration_pb2.Duration(seconds=751), + ) + response = client.update_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.UpdateServiceLevelObjectiveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ServiceLevelObjective) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert math.isclose(response.goal, 0.419, rel_tol=1e-6) + + +def test_update_service_level_objective_from_dict(): + test_update_service_level_objective(request_type=dict) + + +def test_update_service_level_objective_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service_level_objective), + '__call__') as call: + client.update_service_level_objective() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.UpdateServiceLevelObjectiveRequest() + + +@pytest.mark.asyncio +async def test_update_service_level_objective_async(transport: str = 'grpc_asyncio', request_type=service_service.UpdateServiceLevelObjectiveRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective( + name='name_value', + display_name='display_name_value', + goal=0.419, + )) + response = await client.update_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.UpdateServiceLevelObjectiveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ServiceLevelObjective) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert math.isclose(response.goal, 0.419, rel_tol=1e-6) + + +@pytest.mark.asyncio +async def test_update_service_level_objective_async_from_dict(): + await test_update_service_level_objective_async(request_type=dict) + + +def test_update_service_level_objective_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.UpdateServiceLevelObjectiveRequest() + + request.service_level_objective.name = 'service_level_objective.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service_level_objective), + '__call__') as call: + call.return_value = service.ServiceLevelObjective() + client.update_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_level_objective.name=service_level_objective.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_service_level_objective_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.UpdateServiceLevelObjectiveRequest() + + request.service_level_objective.name = 'service_level_objective.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service_level_objective), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) + await client.update_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_level_objective.name=service_level_objective.name/value', + ) in kw['metadata'] + + +def test_update_service_level_objective_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ServiceLevelObjective() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_service_level_objective( + service_level_objective=service.ServiceLevelObjective(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].service_level_objective == service.ServiceLevelObjective(name='name_value') + + +def test_update_service_level_objective_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_service_level_objective( + service_service.UpdateServiceLevelObjectiveRequest(), + service_level_objective=service.ServiceLevelObjective(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_update_service_level_objective_flattened_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ServiceLevelObjective() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_service_level_objective( + service_level_objective=service.ServiceLevelObjective(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].service_level_objective == service.ServiceLevelObjective(name='name_value') + + +@pytest.mark.asyncio +async def test_update_service_level_objective_flattened_error_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_service_level_objective( + service_service.UpdateServiceLevelObjectiveRequest(), + service_level_objective=service.ServiceLevelObjective(name='name_value'), + ) + + +def test_delete_service_level_objective(transport: str = 'grpc', request_type=service_service.DeleteServiceLevelObjectiveRequest): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.DeleteServiceLevelObjectiveRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_service_level_objective_from_dict(): + test_delete_service_level_objective(request_type=dict) + + +def test_delete_service_level_objective_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service_level_objective), + '__call__') as call: + client.delete_service_level_objective() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.DeleteServiceLevelObjectiveRequest() + + +@pytest.mark.asyncio +async def test_delete_service_level_objective_async(transport: str = 'grpc_asyncio', request_type=service_service.DeleteServiceLevelObjectiveRequest): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service_service.DeleteServiceLevelObjectiveRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_service_level_objective_async_from_dict(): + await test_delete_service_level_objective_async(request_type=dict) + + +def test_delete_service_level_objective_field_headers(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.DeleteServiceLevelObjectiveRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service_level_objective), + '__call__') as call: + call.return_value = None + client.delete_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_service_level_objective_field_headers_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_service.DeleteServiceLevelObjectiveRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service_level_objective), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_service_level_objective(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_service_level_objective_flattened(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_service_level_objective( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_service_level_objective_flattened_error(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_service_level_objective( + service_service.DeleteServiceLevelObjectiveRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_service_level_objective_flattened_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service_level_objective), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_service_level_objective( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_service_level_objective_flattened_error_async(): + client = ServiceMonitoringServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_service_level_objective( + service_service.DeleteServiceLevelObjectiveRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ServiceMonitoringServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ServiceMonitoringServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceMonitoringServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ServiceMonitoringServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceMonitoringServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ServiceMonitoringServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ServiceMonitoringServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ServiceMonitoringServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ServiceMonitoringServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ServiceMonitoringServiceGrpcTransport, + transports.ServiceMonitoringServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ServiceMonitoringServiceGrpcTransport, + ) + +def test_service_monitoring_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ServiceMonitoringServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_service_monitoring_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ServiceMonitoringServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'create_service', + 'get_service', + 'list_services', + 'update_service', + 'delete_service', + 'create_service_level_objective', + 'get_service_level_objective', + 'list_service_level_objectives', + 'update_service_level_objective', + 'delete_service_level_objective', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_service_monitoring_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceMonitoringServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_service_monitoring_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceMonitoringServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ), + quota_project_id="octopus", + ) + + +def test_service_monitoring_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceMonitoringServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_service_monitoring_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
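+    # google.auth.default is patched so the test can assert that the client
+    # requests the monitoring OAuth scopes via default_scopes without
+    # touching a real credential source.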
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ServiceMonitoringServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_service_monitoring_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ServiceMonitoringServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceMonitoringServiceGrpcTransport, + transports.ServiceMonitoringServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_service_monitoring_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceMonitoringServiceGrpcTransport, + transports.ServiceMonitoringServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_service_monitoring_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ServiceMonitoringServiceGrpcTransport, grpc_helpers), + (transports.ServiceMonitoringServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_service_monitoring_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
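+    # Both google.auth.default and grpc_helpers.create_channel are patched so
+    # the assertion below can inspect the channel arguments (host, scopes and
+    # message-size options) without opening a real connection.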
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "monitoring.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + scopes=["1", "2"], + default_host="monitoring.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.ServiceMonitoringServiceGrpcTransport, transports.ServiceMonitoringServiceGrpcAsyncIOTransport]) +def test_service_monitoring_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_service_monitoring_service_host_no_port(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), + ) + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_service_monitoring_service_host_with_port(): + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), + ) + assert client.transport._host == 'monitoring.googleapis.com:8000' + +def test_service_monitoring_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
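+    # A transport constructed around an existing channel should adopt it
+    # as-is and skip creating its own SSL channel credentials.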
+ transport = transports.ServiceMonitoringServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_service_monitoring_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ServiceMonitoringServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ServiceMonitoringServiceGrpcTransport, transports.ServiceMonitoringServiceGrpcAsyncIOTransport]) +def test_service_monitoring_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
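+# In this variant no client_cert_source is supplied, so the transport is
+# expected to fall back to google.auth.transport.grpc.SslCredentials (mocked
+# below) for the mTLS channel credentials.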
+@pytest.mark.parametrize("transport_class", [transports.ServiceMonitoringServiceGrpcTransport, transports.ServiceMonitoringServiceGrpcAsyncIOTransport]) +def test_service_monitoring_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_service_path(): + project = "squid" + service = "clam" + expected = "projects/{project}/services/{service}".format(project=project, service=service, ) + actual = ServiceMonitoringServiceClient.service_path(project, service) + assert expected == actual + + +def test_parse_service_path(): + expected = { + "project": "whelk", + "service": "octopus", + } + path = ServiceMonitoringServiceClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceMonitoringServiceClient.parse_service_path(path) + assert expected == actual + +def test_service_level_objective_path(): + project = "oyster" + service = "nudibranch" + service_level_objective = "cuttlefish" + expected = "projects/{project}/services/{service}/serviceLevelObjectives/{service_level_objective}".format(project=project, service=service, service_level_objective=service_level_objective, ) + actual = ServiceMonitoringServiceClient.service_level_objective_path(project, service, service_level_objective) + assert expected == actual + + +def test_parse_service_level_objective_path(): + expected = { + "project": "mussel", + "service": "winkle", + "service_level_objective": "nautilus", + } + path = ServiceMonitoringServiceClient.service_level_objective_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceMonitoringServiceClient.parse_service_level_objective_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ServiceMonitoringServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = ServiceMonitoringServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceMonitoringServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = ServiceMonitoringServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = ServiceMonitoringServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceMonitoringServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ServiceMonitoringServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = ServiceMonitoringServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceMonitoringServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = ServiceMonitoringServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = ServiceMonitoringServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceMonitoringServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ServiceMonitoringServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = ServiceMonitoringServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceMonitoringServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ServiceMonitoringServiceTransport, '_prep_wrapped_messages') as prep: + client = ServiceMonitoringServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ServiceMonitoringServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = ServiceMonitoringServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py new file mode 100644 index 00000000..54e5cb8c --- /dev/null +++ b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py @@ -0,0 +1,2610 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api import monitored_resource_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.monitoring_v3.services.uptime_check_service import UptimeCheckServiceAsyncClient +from google.cloud.monitoring_v3.services.uptime_check_service import UptimeCheckServiceClient +from google.cloud.monitoring_v3.services.uptime_check_service import pagers +from google.cloud.monitoring_v3.services.uptime_check_service import transports +from google.cloud.monitoring_v3.services.uptime_check_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.monitoring_v3.types import uptime +from google.cloud.monitoring_v3.types import uptime_service +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
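+# The markers below skip tests based on the installed google-auth version,
+# which is compared using packaging.version.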
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert UptimeCheckServiceClient._get_default_mtls_endpoint(None) is None + assert UptimeCheckServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert UptimeCheckServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert UptimeCheckServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert UptimeCheckServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert UptimeCheckServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + UptimeCheckServiceClient, + UptimeCheckServiceAsyncClient, +]) +def test_uptime_check_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.UptimeCheckServiceGrpcTransport, "grpc"), + (transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_uptime_check_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + UptimeCheckServiceClient, + UptimeCheckServiceAsyncClient, +]) +def test_uptime_check_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = 
client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_uptime_check_service_client_get_transport_class(): + transport = UptimeCheckServiceClient.get_transport_class() + available_transports = [ + transports.UptimeCheckServiceGrpcTransport, + ] + assert transport in available_transports + + transport = UptimeCheckServiceClient.get_transport_class("grpc") + assert transport == transports.UptimeCheckServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc"), + (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(UptimeCheckServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UptimeCheckServiceClient)) +@mock.patch.object(UptimeCheckServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UptimeCheckServiceAsyncClient)) +def test_uptime_check_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(UptimeCheckServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(UptimeCheckServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
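+    # With "always", the client must target DEFAULT_MTLS_ENDPOINT even though
+    # no client certificate is configured.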
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc", "true"), + (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc", "false"), + (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(UptimeCheckServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UptimeCheckServiceClient)) +@mock.patch.object(UptimeCheckServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UptimeCheckServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_uptime_check_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
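+    # When the environment variable is "false" the regular endpoint and no
+    # cert source are expected; when it is "true" the mTLS endpoint and the
+    # callback are expected instead.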
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc"), + (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_uptime_check_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
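+    # User-supplied OAuth scopes should be forwarded to the transport
+    # constructor unchanged.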
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc"), + (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_uptime_check_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_uptime_check_service_client_client_options_from_dict(): + with mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = UptimeCheckServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_uptime_check_configs(transport: str = 'grpc', request_type=uptime_service.ListUptimeCheckConfigsRequest): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime_service.ListUptimeCheckConfigsResponse( + next_page_token='next_page_token_value', + total_size=1086, + ) + response = client.list_uptime_check_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.ListUptimeCheckConfigsRequest() + + # Establish that the response is the type that we expect. 
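+    # The raw ListUptimeCheckConfigsResponse is wrapped in a pager, which
+    # proxies the scalar response fields asserted below.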
+ assert isinstance(response, pagers.ListUptimeCheckConfigsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.total_size == 1086 + + +def test_list_uptime_check_configs_from_dict(): + test_list_uptime_check_configs(request_type=dict) + + +def test_list_uptime_check_configs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__') as call: + client.list_uptime_check_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.ListUptimeCheckConfigsRequest() + + +@pytest.mark.asyncio +async def test_list_uptime_check_configs_async(transport: str = 'grpc_asyncio', request_type=uptime_service.ListUptimeCheckConfigsRequest): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime_service.ListUptimeCheckConfigsResponse( + next_page_token='next_page_token_value', + total_size=1086, + )) + response = await client.list_uptime_check_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.ListUptimeCheckConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUptimeCheckConfigsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_uptime_check_configs_async_from_dict(): + await test_list_uptime_check_configs_async(request_type=dict) + + +def test_list_uptime_check_configs_field_headers(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.ListUptimeCheckConfigsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__') as call: + call.return_value = uptime_service.ListUptimeCheckConfigsResponse() + client.list_uptime_check_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_uptime_check_configs_field_headers_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.ListUptimeCheckConfigsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime_service.ListUptimeCheckConfigsResponse()) + await client.list_uptime_check_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_uptime_check_configs_flattened(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime_service.ListUptimeCheckConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_uptime_check_configs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_uptime_check_configs_flattened_error(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_uptime_check_configs( + uptime_service.ListUptimeCheckConfigsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_uptime_check_configs_flattened_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime_service.ListUptimeCheckConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime_service.ListUptimeCheckConfigsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_uptime_check_configs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_uptime_check_configs_flattened_error_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_uptime_check_configs( + uptime_service.ListUptimeCheckConfigsRequest(), + parent='parent_value', + ) + + +def test_list_uptime_check_configs_pager(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + ], + next_page_token='abc', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[], + next_page_token='def', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + ], + next_page_token='ghi', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_uptime_check_configs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, uptime.UptimeCheckConfig) + for i in results) + +def test_list_uptime_check_configs_pages(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + ], + next_page_token='abc', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[], + next_page_token='def', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + ], + next_page_token='ghi', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_uptime_check_configs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_uptime_check_configs_async_pager(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
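+        # side_effect yields one response per page request; the trailing
+        # RuntimeError would only surface if the pager requested more pages
+        # than the fixture provides.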
+ call.side_effect = ( + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + ], + next_page_token='abc', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[], + next_page_token='def', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + ], + next_page_token='ghi', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_uptime_check_configs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, uptime.UptimeCheckConfig) + for i in responses) + +@pytest.mark.asyncio +async def test_list_uptime_check_configs_async_pages(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + ], + next_page_token='abc', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[], + next_page_token='def', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + ], + next_page_token='ghi', + ), + uptime_service.ListUptimeCheckConfigsResponse( + uptime_check_configs=[ + uptime.UptimeCheckConfig(), + uptime.UptimeCheckConfig(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_uptime_check_configs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_uptime_check_config(transport: str = 'grpc', request_type=uptime_service.GetUptimeCheckConfigRequest): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime.UptimeCheckConfig( + name='name_value', + display_name='display_name_value', + selected_regions=[uptime.UptimeCheckRegion.USA], + is_internal=True, + monitored_resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + http_check=uptime.UptimeCheckConfig.HttpCheck(request_method=uptime.UptimeCheckConfig.HttpCheck.RequestMethod.GET), + ) + response = client.get_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.GetUptimeCheckConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, uptime.UptimeCheckConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.selected_regions == [uptime.UptimeCheckRegion.USA] + assert response.is_internal is True + + +def test_get_uptime_check_config_from_dict(): + test_get_uptime_check_config(request_type=dict) + + +def test_get_uptime_check_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_uptime_check_config), + '__call__') as call: + client.get_uptime_check_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.GetUptimeCheckConfigRequest() + + +@pytest.mark.asyncio +async def test_get_uptime_check_config_async(transport: str = 'grpc_asyncio', request_type=uptime_service.GetUptimeCheckConfigRequest): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig( + name='name_value', + display_name='display_name_value', + selected_regions=[uptime.UptimeCheckRegion.USA], + is_internal=True, + )) + response = await client.get_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.GetUptimeCheckConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, uptime.UptimeCheckConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.selected_regions == [uptime.UptimeCheckRegion.USA] + assert response.is_internal is True + + +@pytest.mark.asyncio +async def test_get_uptime_check_config_async_from_dict(): + await test_get_uptime_check_config_async(request_type=dict) + + +def test_get_uptime_check_config_field_headers(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.GetUptimeCheckConfigRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_uptime_check_config), + '__call__') as call: + call.return_value = uptime.UptimeCheckConfig() + client.get_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_uptime_check_config_field_headers_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.GetUptimeCheckConfigRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_uptime_check_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) + await client.get_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_uptime_check_config_flattened(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime.UptimeCheckConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_uptime_check_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_uptime_check_config_flattened_error(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_uptime_check_config( + uptime_service.GetUptimeCheckConfigRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_uptime_check_config_flattened_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime.UptimeCheckConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_uptime_check_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_uptime_check_config_flattened_error_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
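+    # The mixed call should be rejected client-side with ValueError before
+    # any RPC is attempted.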
+ with pytest.raises(ValueError): + await client.get_uptime_check_config( + uptime_service.GetUptimeCheckConfigRequest(), + name='name_value', + ) + + +def test_create_uptime_check_config(transport: str = 'grpc', request_type=uptime_service.CreateUptimeCheckConfigRequest): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime.UptimeCheckConfig( + name='name_value', + display_name='display_name_value', + selected_regions=[uptime.UptimeCheckRegion.USA], + is_internal=True, + monitored_resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + http_check=uptime.UptimeCheckConfig.HttpCheck(request_method=uptime.UptimeCheckConfig.HttpCheck.RequestMethod.GET), + ) + response = client.create_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.CreateUptimeCheckConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, uptime.UptimeCheckConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.selected_regions == [uptime.UptimeCheckRegion.USA] + assert response.is_internal is True + + +def test_create_uptime_check_config_from_dict(): + test_create_uptime_check_config(request_type=dict) + + +def test_create_uptime_check_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_uptime_check_config), + '__call__') as call: + client.create_uptime_check_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.CreateUptimeCheckConfigRequest() + + +@pytest.mark.asyncio +async def test_create_uptime_check_config_async(transport: str = 'grpc_asyncio', request_type=uptime_service.CreateUptimeCheckConfigRequest): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig( + name='name_value', + display_name='display_name_value', + selected_regions=[uptime.UptimeCheckRegion.USA], + is_internal=True, + )) + response = await client.create_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.CreateUptimeCheckConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, uptime.UptimeCheckConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.selected_regions == [uptime.UptimeCheckRegion.USA] + assert response.is_internal is True + + +@pytest.mark.asyncio +async def test_create_uptime_check_config_async_from_dict(): + await test_create_uptime_check_config_async(request_type=dict) + + +def test_create_uptime_check_config_field_headers(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.CreateUptimeCheckConfigRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_uptime_check_config), + '__call__') as call: + call.return_value = uptime.UptimeCheckConfig() + client.create_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_uptime_check_config_field_headers_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.CreateUptimeCheckConfigRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_uptime_check_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) + await client.create_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_uptime_check_config_flattened(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime.UptimeCheckConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_uptime_check_config( + parent='parent_value', + uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].uptime_check_config == uptime.UptimeCheckConfig(name='name_value') + + +def test_create_uptime_check_config_flattened_error(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_uptime_check_config( + uptime_service.CreateUptimeCheckConfigRequest(), + parent='parent_value', + uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_uptime_check_config_flattened_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime.UptimeCheckConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_uptime_check_config( + parent='parent_value', + uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].uptime_check_config == uptime.UptimeCheckConfig(name='name_value') + + +@pytest.mark.asyncio +async def test_create_uptime_check_config_flattened_error_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_uptime_check_config( + uptime_service.CreateUptimeCheckConfigRequest(), + parent='parent_value', + uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), + ) + + +def test_update_uptime_check_config(transport: str = 'grpc', request_type=uptime_service.UpdateUptimeCheckConfigRequest): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime.UptimeCheckConfig( + name='name_value', + display_name='display_name_value', + selected_regions=[uptime.UptimeCheckRegion.USA], + is_internal=True, + monitored_resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + http_check=uptime.UptimeCheckConfig.HttpCheck(request_method=uptime.UptimeCheckConfig.HttpCheck.RequestMethod.GET), + ) + response = client.update_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.UpdateUptimeCheckConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, uptime.UptimeCheckConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.selected_regions == [uptime.UptimeCheckRegion.USA] + assert response.is_internal is True + + +def test_update_uptime_check_config_from_dict(): + test_update_uptime_check_config(request_type=dict) + + +def test_update_uptime_check_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_uptime_check_config), + '__call__') as call: + client.update_uptime_check_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.UpdateUptimeCheckConfigRequest() + + +@pytest.mark.asyncio +async def test_update_uptime_check_config_async(transport: str = 'grpc_asyncio', request_type=uptime_service.UpdateUptimeCheckConfigRequest): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig( + name='name_value', + display_name='display_name_value', + selected_regions=[uptime.UptimeCheckRegion.USA], + is_internal=True, + )) + response = await client.update_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.UpdateUptimeCheckConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, uptime.UptimeCheckConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.selected_regions == [uptime.UptimeCheckRegion.USA] + assert response.is_internal is True + + +@pytest.mark.asyncio +async def test_update_uptime_check_config_async_from_dict(): + await test_update_uptime_check_config_async(request_type=dict) + + +def test_update_uptime_check_config_field_headers(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.UpdateUptimeCheckConfigRequest() + + request.uptime_check_config.name = 'uptime_check_config.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_uptime_check_config), + '__call__') as call: + call.return_value = uptime.UptimeCheckConfig() + client.update_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'uptime_check_config.name=uptime_check_config.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_uptime_check_config_field_headers_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.UpdateUptimeCheckConfigRequest() + + request.uptime_check_config.name = 'uptime_check_config.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_uptime_check_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) + await client.update_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'uptime_check_config.name=uptime_check_config.name/value', + ) in kw['metadata'] + + +def test_update_uptime_check_config_flattened(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime.UptimeCheckConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_uptime_check_config( + uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].uptime_check_config == uptime.UptimeCheckConfig(name='name_value') + + +def test_update_uptime_check_config_flattened_error(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_uptime_check_config( + uptime_service.UpdateUptimeCheckConfigRequest(), + uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_update_uptime_check_config_flattened_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = uptime.UptimeCheckConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_uptime_check_config( + uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].uptime_check_config == uptime.UptimeCheckConfig(name='name_value') + + +@pytest.mark.asyncio +async def test_update_uptime_check_config_flattened_error_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_uptime_check_config( + uptime_service.UpdateUptimeCheckConfigRequest(), + uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), + ) + + +def test_delete_uptime_check_config(transport: str = 'grpc', request_type=uptime_service.DeleteUptimeCheckConfigRequest): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.DeleteUptimeCheckConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_uptime_check_config_from_dict(): + test_delete_uptime_check_config(request_type=dict) + + +def test_delete_uptime_check_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_uptime_check_config), + '__call__') as call: + client.delete_uptime_check_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.DeleteUptimeCheckConfigRequest() + + +@pytest.mark.asyncio +async def test_delete_uptime_check_config_async(transport: str = 'grpc_asyncio', request_type=uptime_service.DeleteUptimeCheckConfigRequest): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.DeleteUptimeCheckConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_uptime_check_config_async_from_dict(): + await test_delete_uptime_check_config_async(request_type=dict) + + +def test_delete_uptime_check_config_field_headers(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.DeleteUptimeCheckConfigRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_uptime_check_config), + '__call__') as call: + call.return_value = None + client.delete_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_uptime_check_config_field_headers_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = uptime_service.DeleteUptimeCheckConfigRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_uptime_check_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_uptime_check_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_uptime_check_config_flattened(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_uptime_check_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_uptime_check_config_flattened_error(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_uptime_check_config( + uptime_service.DeleteUptimeCheckConfigRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_uptime_check_config_flattened_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_uptime_check_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_uptime_check_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_uptime_check_config_flattened_error_async(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_uptime_check_config( + uptime_service.DeleteUptimeCheckConfigRequest(), + name='name_value', + ) + + +def test_list_uptime_check_ips(transport: str = 'grpc', request_type=uptime_service.ListUptimeCheckIpsRequest): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = uptime_service.ListUptimeCheckIpsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_uptime_check_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.ListUptimeCheckIpsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUptimeCheckIpsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_uptime_check_ips_from_dict(): + test_list_uptime_check_ips(request_type=dict) + + +def test_list_uptime_check_ips_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_uptime_check_ips), + '__call__') as call: + client.list_uptime_check_ips() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.ListUptimeCheckIpsRequest() + + +@pytest.mark.asyncio +async def test_list_uptime_check_ips_async(transport: str = 'grpc_asyncio', request_type=uptime_service.ListUptimeCheckIpsRequest): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime_service.ListUptimeCheckIpsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_uptime_check_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == uptime_service.ListUptimeCheckIpsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUptimeCheckIpsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_uptime_check_ips_async_from_dict(): + await test_list_uptime_check_ips_async(request_type=dict) + + +def test_list_uptime_check_ips_pager(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_ips), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + ], + next_page_token='abc', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[], + next_page_token='def', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + ], + next_page_token='ghi', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_uptime_check_ips(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, uptime.UptimeCheckIp) + for i in results) + +def test_list_uptime_check_ips_pages(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_ips), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + ], + next_page_token='abc', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[], + next_page_token='def', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + ], + next_page_token='ghi', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + ], + ), + RuntimeError, + ) + pages = list(client.list_uptime_check_ips(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_uptime_check_ips_async_pager(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_ips), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + ], + next_page_token='abc', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[], + next_page_token='def', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + ], + next_page_token='ghi', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_uptime_check_ips(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, uptime.UptimeCheckIp) + for i in responses) + +@pytest.mark.asyncio +async def test_list_uptime_check_ips_async_pages(): + client = UptimeCheckServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_uptime_check_ips), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + ], + next_page_token='abc', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[], + next_page_token='def', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + ], + next_page_token='ghi', + ), + uptime_service.ListUptimeCheckIpsResponse( + uptime_check_ips=[ + uptime.UptimeCheckIp(), + uptime.UptimeCheckIp(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_uptime_check_ips(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.UptimeCheckServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.UptimeCheckServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UptimeCheckServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.UptimeCheckServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UptimeCheckServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.UptimeCheckServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = UptimeCheckServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.UptimeCheckServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.UptimeCheckServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.UptimeCheckServiceGrpcTransport, + transports.UptimeCheckServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.UptimeCheckServiceGrpcTransport, + ) + +def test_uptime_check_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.UptimeCheckServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_uptime_check_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.UptimeCheckServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_uptime_check_configs', + 'get_uptime_check_config', + 'create_uptime_check_config', + 'update_uptime_check_config', + 'delete_uptime_check_config', + 'list_uptime_check_ips', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_uptime_check_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.UptimeCheckServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_uptime_check_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.UptimeCheckServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', + ), + quota_project_id="octopus", + ) + + +def test_uptime_check_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.UptimeCheckServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_uptime_check_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + UptimeCheckServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_uptime_check_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + UptimeCheckServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.UptimeCheckServiceGrpcTransport, + transports.UptimeCheckServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_uptime_check_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.UptimeCheckServiceGrpcTransport, + transports.UptimeCheckServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_uptime_check_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.UptimeCheckServiceGrpcTransport, grpc_helpers), + (transports.UptimeCheckServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_uptime_check_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "monitoring.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/monitoring.read', +), + scopes=["1", "2"], + default_host="monitoring.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.UptimeCheckServiceGrpcTransport, transports.UptimeCheckServiceGrpcAsyncIOTransport]) +def test_uptime_check_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_uptime_check_service_host_no_port(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), + ) + assert client.transport._host == 'monitoring.googleapis.com:443' + + +def test_uptime_check_service_host_with_port(): + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), + ) + assert client.transport._host == 'monitoring.googleapis.com:8000' + +def test_uptime_check_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.UptimeCheckServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_uptime_check_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.UptimeCheckServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.UptimeCheckServiceGrpcTransport, transports.UptimeCheckServiceGrpcAsyncIOTransport]) +def test_uptime_check_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.UptimeCheckServiceGrpcTransport, transports.UptimeCheckServiceGrpcAsyncIOTransport]) +def test_uptime_check_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_uptime_check_config_path(): + project = "squid" + uptime_check_config = "clam" + expected = "projects/{project}/uptimeCheckConfigs/{uptime_check_config}".format(project=project, uptime_check_config=uptime_check_config, ) + actual = UptimeCheckServiceClient.uptime_check_config_path(project, uptime_check_config) + assert expected == actual + + +def test_parse_uptime_check_config_path(): + expected = { + "project": "whelk", + "uptime_check_config": "octopus", + } + path = UptimeCheckServiceClient.uptime_check_config_path(**expected) + + # Check that the path construction is reversible. + actual = UptimeCheckServiceClient.parse_uptime_check_config_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = UptimeCheckServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = UptimeCheckServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = UptimeCheckServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = UptimeCheckServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = UptimeCheckServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = UptimeCheckServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = UptimeCheckServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = UptimeCheckServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = UptimeCheckServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = UptimeCheckServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = UptimeCheckServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = UptimeCheckServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = UptimeCheckServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = UptimeCheckServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = UptimeCheckServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.UptimeCheckServiceTransport, '_prep_wrapped_messages') as prep: + client = UptimeCheckServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.UptimeCheckServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = UptimeCheckServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) From 45e99684b25c402161c95e0e016e8663b922a091 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 26 Aug 2021 21:48:45 +0000 Subject: [PATCH 2/3] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/cloud/monitoring/__init__.py | 2 - google/cloud/monitoring_v3/__init__.py | 2 - .../alert_policy_service/async_client.py | 13 +- .../services/alert_policy_service/client.py | 10 +- .../alert_policy_service/transports/base.py | 3 - .../services/group_service/async_client.py | 19 +- .../services/group_service/client.py | 10 +- .../services/group_service/transports/base.py | 9 +- .../services/metric_service/async_client.py | 41 +- .../services/metric_service/client.py | 31 +- .../metric_service/transports/base.py | 10 +- .../async_client.py | 26 +- .../notification_channel_service/client.py | 19 +- .../transports/base.py | 7 - .../async_client.py | 16 +- .../service_monitoring_service/client.py | 10 +- .../transports/base.py | 6 - .../uptime_check_service/async_client.py | 15 +- .../services/uptime_check_service/client.py | 11 +- .../uptime_check_service/transports/base.py | 4 - google/cloud/monitoring_v3/types/__init__.py | 2 - google/cloud/monitoring_v3/types/alert.py | 74 + .../monitoring_v3/types/alert_service.py | 10 +- google/cloud/monitoring_v3/types/common.py | 21 +- .../monitoring_v3/types/group_service.py | 10 +- google/cloud/monitoring_v3/types/metric.py | 8 +- .../monitoring_v3/types/metric_service.py | 36 +- 
.../cloud/monitoring_v3/types/notification.py | 7 - .../types/notification_service.py | 17 +- google/cloud/monitoring_v3/types/service.py | 41 +- .../monitoring_v3/types/service_service.py | 10 +- .../cloud/monitoring_v3/types/span_context.py | 4 +- google/cloud/monitoring_v3/types/uptime.py | 6 +- .../monitoring_v3/types/uptime_service.py | 10 +- owl-bot-staging/v3/.coveragerc | 17 - owl-bot-staging/v3/MANIFEST.in | 2 - owl-bot-staging/v3/README.rst | 49 - owl-bot-staging/v3/docs/conf.py | 376 -- owl-bot-staging/v3/docs/index.rst | 7 - .../monitoring_v3/alert_policy_service.rst | 10 - .../v3/docs/monitoring_v3/group_service.rst | 10 - .../v3/docs/monitoring_v3/metric_service.rst | 10 - .../notification_channel_service.rst | 10 - .../v3/docs/monitoring_v3/query_service.rst | 10 - .../service_monitoring_service.rst | 10 - .../v3/docs/monitoring_v3/services.rst | 12 - .../v3/docs/monitoring_v3/types.rst | 7 - .../monitoring_v3/uptime_check_service.rst | 10 - .../v3/google/cloud/monitoring/__init__.py | 237 -- .../v3/google/cloud/monitoring/py.typed | 2 - .../v3/google/cloud/monitoring_v3/__init__.py | 238 -- .../cloud/monitoring_v3/gapic_metadata.json | 567 --- .../v3/google/cloud/monitoring_v3/py.typed | 2 - .../cloud/monitoring_v3/services/__init__.py | 15 - .../services/alert_policy_service/__init__.py | 22 - .../alert_policy_service/async_client.py | 673 --- .../services/alert_policy_service/client.py | 855 ---- .../services/alert_policy_service/pagers.py | 141 - .../transports/__init__.py | 33 - .../alert_policy_service/transports/base.py | 246 -- .../alert_policy_service/transports/grpc.py | 370 -- .../transports/grpc_asyncio.py | 374 -- .../services/group_service/__init__.py | 22 - .../services/group_service/async_client.py | 793 ---- .../services/group_service/client.py | 954 ----- .../services/group_service/pagers.py | 264 -- .../group_service/transports/__init__.py | 33 - .../services/group_service/transports/base.py | 273 -- .../services/group_service/transports/grpc.py | 398 -- .../group_service/transports/grpc_asyncio.py | 402 -- .../services/metric_service/__init__.py | 22 - .../services/metric_service/async_client.py | 967 ----- .../services/metric_service/client.py | 1140 ----- .../services/metric_service/pagers.py | 387 -- .../metric_service/transports/__init__.py | 33 - .../metric_service/transports/base.py | 308 -- .../metric_service/transports/grpc.py | 453 -- .../metric_service/transports/grpc_asyncio.py | 457 -- .../notification_channel_service/__init__.py | 22 - .../async_client.py | 1143 ----- .../notification_channel_service/client.py | 1301 ------ .../notification_channel_service/pagers.py | 263 -- .../transports/__init__.py | 33 - .../transports/base.py | 340 -- .../transports/grpc.py | 538 --- .../transports/grpc_asyncio.py | 542 --- .../services/query_service/__init__.py | 22 - .../services/query_service/async_client.py | 231 - .../services/query_service/client.py | 414 -- .../services/query_service/pagers.py | 141 - .../query_service/transports/__init__.py | 33 - .../services/query_service/transports/base.py | 170 - .../services/query_service/transports/grpc.py | 255 -- .../query_service/transports/grpc_asyncio.py | 259 -- .../service_monitoring_service/__init__.py | 22 - .../async_client.py | 1061 ----- .../service_monitoring_service/client.py | 1225 ------ .../service_monitoring_service/pagers.py | 263 -- .../transports/__init__.py | 33 - .../transports/base.py | 335 -- .../transports/grpc.py | 492 --- .../transports/grpc_asyncio.py | 496 --- 
.../services/uptime_check_service/__init__.py | 22 - .../uptime_check_service/async_client.py | 686 --- .../services/uptime_check_service/client.py | 854 ---- .../services/uptime_check_service/pagers.py | 263 -- .../transports/__init__.py | 33 - .../uptime_check_service/transports/base.py | 266 -- .../uptime_check_service/transports/grpc.py | 401 -- .../transports/grpc_asyncio.py | 405 -- .../cloud/monitoring_v3/types/__init__.py | 240 -- .../google/cloud/monitoring_v3/types/alert.py | 654 --- .../monitoring_v3/types/alert_service.py | 257 -- .../cloud/monitoring_v3/types/common.py | 333 -- .../monitoring_v3/types/dropped_labels.py | 59 - .../google/cloud/monitoring_v3/types/group.py | 112 - .../monitoring_v3/types/group_service.py | 346 -- .../cloud/monitoring_v3/types/metric.py | 417 -- .../monitoring_v3/types/metric_service.py | 665 --- .../monitoring_v3/types/mutation_record.py | 50 - .../cloud/monitoring_v3/types/notification.py | 256 -- .../types/notification_service.py | 445 -- .../monitoring_v3/types/query_service.py | 25 - .../cloud/monitoring_v3/types/service.py | 775 ---- .../monitoring_v3/types/service_service.py | 416 -- .../cloud/monitoring_v3/types/span_context.py | 61 - .../cloud/monitoring_v3/types/uptime.py | 538 --- .../monitoring_v3/types/uptime_service.py | 269 -- owl-bot-staging/v3/mypy.ini | 3 - owl-bot-staging/v3/noxfile.py | 132 - .../scripts/fixup_monitoring_v3_keywords.py | 221 - owl-bot-staging/v3/setup.py | 54 - owl-bot-staging/v3/tests/__init__.py | 16 - owl-bot-staging/v3/tests/unit/__init__.py | 16 - .../v3/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/monitoring_v3/__init__.py | 16 - .../test_alert_policy_service.py | 2365 ----------- .../gapic/monitoring_v3/test_group_service.py | 2765 ------------ .../monitoring_v3/test_metric_service.py | 3511 --------------- .../test_notification_channel_service.py | 3757 ----------------- .../gapic/monitoring_v3/test_query_service.py | 1264 ------ .../test_service_monitoring_service.py | 3703 ---------------- .../test_uptime_check_service.py | 2610 ------------ .../test_notification_channel_service.py | 5 - 144 files changed, 285 insertions(+), 50392 deletions(-) delete mode 100644 owl-bot-staging/v3/.coveragerc delete mode 100644 owl-bot-staging/v3/MANIFEST.in delete mode 100644 owl-bot-staging/v3/README.rst delete mode 100644 owl-bot-staging/v3/docs/conf.py delete mode 100644 owl-bot-staging/v3/docs/index.rst delete mode 100644 owl-bot-staging/v3/docs/monitoring_v3/alert_policy_service.rst delete mode 100644 owl-bot-staging/v3/docs/monitoring_v3/group_service.rst delete mode 100644 owl-bot-staging/v3/docs/monitoring_v3/metric_service.rst delete mode 100644 owl-bot-staging/v3/docs/monitoring_v3/notification_channel_service.rst delete mode 100644 owl-bot-staging/v3/docs/monitoring_v3/query_service.rst delete mode 100644 owl-bot-staging/v3/docs/monitoring_v3/service_monitoring_service.rst delete mode 100644 owl-bot-staging/v3/docs/monitoring_v3/services.rst delete mode 100644 owl-bot-staging/v3/docs/monitoring_v3/types.rst delete mode 100644 owl-bot-staging/v3/docs/monitoring_v3/uptime_check_service.rst delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring/py.typed delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/gapic_metadata.json delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/py.typed delete mode 100644 
owl-bot-staging/v3/google/cloud/monitoring_v3/services/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/pagers.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/async_client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/pagers.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/base.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/async_client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/pagers.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/base.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/pagers.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py delete mode 100644 
owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/async_client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/pagers.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/base.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/pagers.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/client.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/pagers.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/__init__.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert_service.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/common.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/dropped_labels.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/group.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/group_service.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric_service.py 
delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/mutation_record.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification_service.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/query_service.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/service.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/service_service.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/span_context.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime.py delete mode 100644 owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime_service.py delete mode 100644 owl-bot-staging/v3/mypy.ini delete mode 100644 owl-bot-staging/v3/noxfile.py delete mode 100644 owl-bot-staging/v3/scripts/fixup_monitoring_v3_keywords.py delete mode 100644 owl-bot-staging/v3/setup.py delete mode 100644 owl-bot-staging/v3/tests/__init__.py delete mode 100644 owl-bot-staging/v3/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v3/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/__init__.py delete mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py delete mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_group_service.py delete mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_metric_service.py delete mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py delete mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_query_service.py delete mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py delete mode 100644 owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py diff --git a/google/cloud/monitoring/__init__.py b/google/cloud/monitoring/__init__.py index 60f07f2d..a95bc456 100644 --- a/google/cloud/monitoring/__init__.py +++ b/google/cloud/monitoring/__init__.py @@ -64,7 +64,6 @@ from google.cloud.monitoring_v3.types.common import TimeInterval from google.cloud.monitoring_v3.types.common import TypedValue from google.cloud.monitoring_v3.types.common import ComparisonType -from google.cloud.monitoring_v3.types.common import ServiceTier from google.cloud.monitoring_v3.types.dropped_labels import DroppedLabels from google.cloud.monitoring_v3.types.group import Group from google.cloud.monitoring_v3.types.group_service import CreateGroupRequest @@ -236,7 +235,6 @@ "TimeInterval", "TypedValue", "ComparisonType", - "ServiceTier", "DroppedLabels", "Group", "CreateGroupRequest", diff --git a/google/cloud/monitoring_v3/__init__.py b/google/cloud/monitoring_v3/__init__.py index 77f1d7b9..3b18dfdb 100644 --- a/google/cloud/monitoring_v3/__init__.py +++ b/google/cloud/monitoring_v3/__init__.py @@ -40,7 +40,6 @@ from .types.common import TimeInterval from .types.common import TypedValue from .types.common import ComparisonType -from .types.common import ServiceTier from .types.dropped_labels import DroppedLabels from .types.group import Group from .types.group_service import CreateGroupRequest @@ -216,7 +215,6 @@ "ServiceLevelIndicator", "ServiceLevelObjective", "ServiceMonitoringServiceClient", - "ServiceTier", "SpanContext", "TextLocator", "TimeInterval", diff --git a/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py 
b/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py index 04b2608e..3a838051 100644 --- a/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py +++ b/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py @@ -201,8 +201,9 @@ async def list_alert_policies( The request object. The protocol for the `ListAlertPolicies` request. name (:class:`str`): - Required. The project whose alert policies are to be - listed. The format is: + Required. The + `project `__ + whose alert policies are to be listed. The format is: :: @@ -257,7 +258,6 @@ async def list_alert_policies( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -351,7 +351,6 @@ async def get_alert_policy( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -389,8 +388,9 @@ async def create_alert_policy( The request object. The protocol for the `CreateAlertPolicy` request. name (:class:`str`): - Required. The project in which to create the alerting - policy. The format is: + Required. The + `project `__ + in which to create the alerting policy. The format is: :: @@ -532,7 +532,6 @@ async def delete_alert_policy( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/services/alert_policy_service/client.py b/google/cloud/monitoring_v3/services/alert_policy_service/client.py index ef6cb59c..f708227b 100644 --- a/google/cloud/monitoring_v3/services/alert_policy_service/client.py +++ b/google/cloud/monitoring_v3/services/alert_policy_service/client.py @@ -400,8 +400,9 @@ def list_alert_policies( The request object. The protocol for the `ListAlertPolicies` request. name (str): - Required. The project whose alert policies are to be - listed. The format is: + Required. The + `project `__ + whose alert policies are to be listed. The format is: :: @@ -568,8 +569,9 @@ def create_alert_policy( The request object. The protocol for the `CreateAlertPolicy` request. name (str): - Required. The project in which to create the alerting - policy. The format is: + Required. The + `project `__ + in which to create the alerting policy. 
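Illustrative sketch (not itself part of this patch): the docstring updates above and below all point at the same resource-name format, projects/[PROJECT_ID_OR_NUMBER]. Assuming a placeholder project ID, this is roughly how that format is passed to the flattened name argument:

    from google.cloud import monitoring_v3

    client = monitoring_v3.AlertPolicyServiceClient()

    # `name` uses the documented format: projects/[PROJECT_ID_OR_NUMBER]
    # ("my-project" is a placeholder, not a value taken from this patch).
    for policy in client.list_alert_policies(name="projects/my-project"):
        print(policy.display_name)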
The format is: :: diff --git a/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py b/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py index 5d483f4b..be14243f 100644 --- a/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py @@ -167,7 +167,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -182,7 +181,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -200,7 +198,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/services/group_service/async_client.py b/google/cloud/monitoring_v3/services/group_service/async_client.py index ae215e68..6ed9c9fc 100644 --- a/google/cloud/monitoring_v3/services/group_service/async_client.py +++ b/google/cloud/monitoring_v3/services/group_service/async_client.py @@ -187,8 +187,9 @@ async def list_groups( request (:class:`google.cloud.monitoring_v3.types.ListGroupsRequest`): The request object. The `ListGroup` request. name (:class:`str`): - Required. The project whose groups are to be listed. The - format is: + Required. The + `project `__ + whose groups are to be listed. The format is: :: @@ -237,7 +238,6 @@ async def list_groups( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -357,7 +357,6 @@ async def get_group( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -394,8 +393,9 @@ async def create_group( request (:class:`google.cloud.monitoring_v3.types.CreateGroupRequest`): The request object. The `CreateGroup` request. name (:class:`str`): - Required. The project in which to create the group. The - format is: + Required. The + `project `__ + in which to create the group. 
The format is: :: @@ -585,12 +585,11 @@ async def update_group( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), - deadline=30.0, + deadline=180.0, ), - default_timeout=30.0, + default_timeout=180.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -666,7 +665,6 @@ async def delete_group( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -752,7 +750,6 @@ async def list_group_members( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/services/group_service/client.py b/google/cloud/monitoring_v3/services/group_service/client.py index 29bf1c21..f5cd5c31 100644 --- a/google/cloud/monitoring_v3/services/group_service/client.py +++ b/google/cloud/monitoring_v3/services/group_service/client.py @@ -373,8 +373,9 @@ def list_groups( request (google.cloud.monitoring_v3.types.ListGroupsRequest): The request object. The `ListGroup` request. name (str): - Required. The project whose groups are to be listed. The - format is: + Required. The + `project `__ + whose groups are to be listed. The format is: :: @@ -560,8 +561,9 @@ def create_group( request (google.cloud.monitoring_v3.types.CreateGroupRequest): The request object. The `CreateGroup` request. name (str): - Required. The project in which to create the group. The - format is: + Required. The + `project `__ + in which to create the group. The format is: :: diff --git a/google/cloud/monitoring_v3/services/group_service/transports/base.py b/google/cloud/monitoring_v3/services/group_service/transports/base.py index 2e431a06..25626df4 100644 --- a/google/cloud/monitoring_v3/services/group_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/group_service/transports/base.py @@ -168,7 +168,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -183,7 +182,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -201,12 +199,11 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), - deadline=30.0, + deadline=180.0, ), - default_timeout=30.0, + default_timeout=180.0, client_info=client_info, ), self.delete_group: gapic_v1.method.wrap_method( @@ -216,7 +213,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -231,7 +227,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/services/metric_service/async_client.py b/google/cloud/monitoring_v3/services/metric_service/async_client.py index cc75b672..93647bda 100644 --- a/google/cloud/monitoring_v3/services/metric_service/async_client.py +++ 
b/google/cloud/monitoring_v3/services/metric_service/async_client.py @@ -194,8 +194,9 @@ async def list_monitored_resource_descriptors( The request object. The `ListMonitoredResourceDescriptors` request. name (:class:`str`): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -244,7 +245,6 @@ async def list_monitored_resource_descriptors( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -348,7 +348,6 @@ async def get_monitored_resource_descriptor( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -385,8 +384,9 @@ async def list_metric_descriptors( request (:class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest`): The request object. The `ListMetricDescriptors` request. name (:class:`str`): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -435,7 +435,6 @@ async def list_metric_descriptors( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -532,7 +531,6 @@ async def get_metric_descriptor( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -572,12 +570,10 @@ async def create_metric_descriptor( The request object. The `CreateMetricDescriptor` request. name (:class:`str`): - Required. The project on which to execute the request. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] + Required. The + `project `__ + on which to execute the request. The format is: 4 + projects/[PROJECT_ID_OR_NUMBER] This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -707,7 +703,6 @@ async def delete_metric_descriptor( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -746,8 +741,10 @@ async def list_time_series( request (:class:`google.cloud.monitoring_v3.types.ListTimeSeriesRequest`): The request object. The `ListTimeSeries` request. name (:class:`str`): - Required. The project, organization or folder on which - to execute the request. The format is: + Required. The + `project `__, + organization or folder on which to execute the request. + The format is: :: @@ -837,12 +834,11 @@ async def list_time_series( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), - deadline=30.0, + deadline=90.0, ), - default_timeout=30.0, + default_timeout=90.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -884,8 +880,9 @@ async def create_time_series( request (:class:`google.cloud.monitoring_v3.types.CreateTimeSeriesRequest`): The request object. The `CreateTimeSeries` request. name (:class:`str`): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. 
The format is: :: diff --git a/google/cloud/monitoring_v3/services/metric_service/client.py b/google/cloud/monitoring_v3/services/metric_service/client.py index 5e37306e..383d045f 100644 --- a/google/cloud/monitoring_v3/services/metric_service/client.py +++ b/google/cloud/monitoring_v3/services/metric_service/client.py @@ -402,8 +402,9 @@ def list_monitored_resource_descriptors( The request object. The `ListMonitoredResourceDescriptors` request. name (str): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -581,8 +582,9 @@ def list_metric_descriptors( request (google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest): The request object. The `ListMetricDescriptors` request. name (str): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -748,12 +750,10 @@ def create_metric_descriptor( The request object. The `CreateMetricDescriptor` request. name (str): - Required. The project on which to execute the request. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] + Required. The + `project `__ + on which to execute the request. The format is: 4 + projects/[PROJECT_ID_OR_NUMBER] This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -912,8 +912,10 @@ def list_time_series( request (google.cloud.monitoring_v3.types.ListTimeSeriesRequest): The request object. The `ListTimeSeries` request. name (str): - Required. The project, organization or folder on which - to execute the request. The format is: + Required. The + `project `__, + organization or folder on which to execute the request. + The format is: :: @@ -1040,8 +1042,9 @@ def create_time_series( request (google.cloud.monitoring_v3.types.CreateTimeSeriesRequest): The request object. The `CreateTimeSeries` request. name (str): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. 
The format is: :: diff --git a/google/cloud/monitoring_v3/services/metric_service/transports/base.py b/google/cloud/monitoring_v3/services/metric_service/transports/base.py index 4db87cfc..a5d847c8 100644 --- a/google/cloud/monitoring_v3/services/metric_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/metric_service/transports/base.py @@ -169,7 +169,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -184,7 +183,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -199,7 +197,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -214,7 +211,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -234,7 +230,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -249,12 +244,11 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), - deadline=30.0, + deadline=90.0, ), - default_timeout=30.0, + default_timeout=90.0, client_info=client_info, ), self.create_time_series: gapic_v1.method.wrap_method( diff --git a/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py b/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py index 9f364a55..a475eeed 100644 --- a/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py +++ b/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py @@ -29,7 +29,6 @@ from google.api import label_pb2 # type: ignore from google.api import launch_stage_pb2 # type: ignore from google.cloud.monitoring_v3.services.notification_channel_service import pagers -from google.cloud.monitoring_v3.types import common from google.cloud.monitoring_v3.types import mutation_record from google.cloud.monitoring_v3.types import notification from google.cloud.monitoring_v3.types import notification_service @@ -214,9 +213,11 @@ async def list_notification_channel_descriptors( projects/[PROJECT_ID_OR_NUMBER] - Note that this names the parent container in which to - look for the descriptors; to retrieve a single - descriptor by name, use the + Note that this + `names `__ + the parent container in which to look for the + descriptors; to retrieve a single descriptor by name, + use the [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] operation, instead. 
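Illustrative sketch (not itself part of this patch): the hunks above drop DeadlineExceeded from the retryable-error predicates and raise the default deadline for ListTimeSeries from 30s to 90s (UpdateGroup moves to 180s in the group service). Assuming a placeholder project and filter, a caller who needs different behavior can still pass an explicit per-call retry and timeout, roughly like this:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()

    # Retry only on ServiceUnavailable, mirroring the new default predicate;
    # the numeric values here are illustrative and can be tuned per workload.
    custom_retry = retries.Retry(
        initial=0.1,
        maximum=30.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=90.0,
    )

    results = client.list_time_series(
        request={
            "name": "projects/my-project",  # placeholder project
            "filter": 'metric.type = "compute.googleapis.com/instance/cpu/utilization"',
            "interval": {
                "end_time": {"seconds": 1630000000},
                "start_time": {"seconds": 1629996400},
            },
            "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL,
        },
        retry=custom_retry,
        timeout=90.0,
    )
    for series in results:
        print(series.metric.type)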
@@ -265,7 +266,6 @@ async def list_notification_channel_descriptors( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -361,7 +361,6 @@ async def get_notification_channel_descriptor( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -399,8 +398,9 @@ async def list_notification_channels( The request object. The `ListNotificationChannels` request. name (:class:`str`): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -456,7 +456,6 @@ async def list_notification_channels( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -558,7 +557,6 @@ async def get_notification_channel( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -598,8 +596,9 @@ async def create_notification_channel( The request object. The `CreateNotificationChannel` request. name (:class:`str`): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -842,7 +841,6 @@ async def delete_notification_channel( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -1021,7 +1019,6 @@ async def get_notification_channel_verification_code( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -1126,7 +1123,6 @@ async def verify_notification_channel( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/services/notification_channel_service/client.py b/google/cloud/monitoring_v3/services/notification_channel_service/client.py index dda00689..9b095a75 100644 --- a/google/cloud/monitoring_v3/services/notification_channel_service/client.py +++ b/google/cloud/monitoring_v3/services/notification_channel_service/client.py @@ -33,7 +33,6 @@ from google.api import label_pb2 # type: ignore from google.api import launch_stage_pb2 # type: ignore from google.cloud.monitoring_v3.services.notification_channel_service import pagers -from google.cloud.monitoring_v3.types import common from google.cloud.monitoring_v3.types import mutation_record from google.cloud.monitoring_v3.types import notification from google.cloud.monitoring_v3.types import notification_service @@ -406,9 +405,11 @@ def list_notification_channel_descriptors( projects/[PROJECT_ID_OR_NUMBER] - Note that this names the parent container in which to - look for the descriptors; to retrieve a single - descriptor by name, use the + Note that this + `names `__ + the parent container in which to look for the + descriptors; to retrieve a single descriptor by name, + use the [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] operation, instead. @@ -581,8 +582,9 @@ def list_notification_channels( The request object. 
The `ListNotificationChannels` request. name (str): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -764,8 +766,9 @@ def create_notification_channel( The request object. The `CreateNotificationChannel` request. name (str): - Required. The project on which to execute the request. - The format is: + Required. The + `project `__ + on which to execute the request. The format is: :: diff --git a/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py b/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py index 66dc68c4..ae246a53 100644 --- a/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py @@ -167,7 +167,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -182,7 +181,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -197,7 +195,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -212,7 +209,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -237,7 +233,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -257,7 +252,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -272,7 +266,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py b/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py index fc940793..012dd79f 100644 --- a/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py +++ b/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py @@ -196,8 +196,9 @@ async def create_service( request (:class:`google.cloud.monitoring_v3.types.CreateServiceRequest`): The request object. The `CreateService` request. parent (:class:`str`): - Required. Resource name of the parent workspace. The - format is: + Required. Resource + `name `__ + of the parent workspace. The format is: :: @@ -333,7 +334,6 @@ async def get_service( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -370,8 +370,9 @@ async def list_services( The request object. The `ListServices` request. parent (:class:`str`): Required. 
Resource name of the parent containing the - listed services, either a project or a Monitoring - Workspace. The formats are: + listed services, either a + `project `__ + or a Monitoring Workspace. The formats are: :: @@ -421,7 +422,6 @@ async def list_services( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -581,7 +581,6 @@ async def delete_service( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -773,7 +772,6 @@ async def get_service_level_objective( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -862,7 +860,6 @@ async def list_service_level_objectives( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -1037,7 +1034,6 @@ async def delete_service_level_objective( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/services/service_monitoring_service/client.py b/google/cloud/monitoring_v3/services/service_monitoring_service/client.py index dbb7658b..268165a2 100644 --- a/google/cloud/monitoring_v3/services/service_monitoring_service/client.py +++ b/google/cloud/monitoring_v3/services/service_monitoring_service/client.py @@ -393,8 +393,9 @@ def create_service( request (google.cloud.monitoring_v3.types.CreateServiceRequest): The request object. The `CreateService` request. parent (str): - Required. Resource name of the parent workspace. The - format is: + Required. Resource + `name `__ + of the parent workspace. The format is: :: @@ -557,8 +558,9 @@ def list_services( The request object. The `ListServices` request. parent (str): Required. Resource name of the parent containing the - listed services, either a project or a Monitoring - Workspace. The formats are: + listed services, either a + `project `__ + or a Monitoring Workspace. 
The formats are: :: diff --git a/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py b/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py index d10ef7a1..83cfc069 100644 --- a/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py @@ -171,7 +171,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -186,7 +185,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -204,7 +202,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -224,7 +221,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -239,7 +235,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -259,7 +254,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py b/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py index 5cf92c80..cb7c6966 100644 --- a/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py +++ b/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py @@ -195,8 +195,10 @@ async def list_uptime_check_configs( The request object. The protocol for the `ListUptimeCheckConfigs` request. parent (:class:`str`): - Required. The project whose Uptime check configurations - are listed. The format is: + Required. The + `project `__ + whose Uptime check configurations are listed. The format + is: :: @@ -245,7 +247,6 @@ async def list_uptime_check_configs( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -337,7 +338,6 @@ async def get_uptime_check_config( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -375,8 +375,9 @@ async def create_uptime_check_config( The request object. The protocol for the `CreateUptimeCheckConfig` request. parent (:class:`str`): - Required. The project in which to create the Uptime - check. The format is: + Required. The + `project `__ + in which to create the Uptime check. 
The format is: :: @@ -594,7 +595,6 @@ async def delete_uptime_check_config( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -655,7 +655,6 @@ async def list_uptime_check_ips( maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/services/uptime_check_service/client.py b/google/cloud/monitoring_v3/services/uptime_check_service/client.py index 055b017c..0758d097 100644 --- a/google/cloud/monitoring_v3/services/uptime_check_service/client.py +++ b/google/cloud/monitoring_v3/services/uptime_check_service/client.py @@ -381,8 +381,10 @@ def list_uptime_check_configs( The request object. The protocol for the `ListUptimeCheckConfigs` request. parent (str): - Required. The project whose Uptime check configurations - are listed. The format is: + Required. The + `project `__ + whose Uptime check configurations are listed. The format + is: :: @@ -543,8 +545,9 @@ def create_uptime_check_config( The request object. The protocol for the `CreateUptimeCheckConfig` request. parent (str): - Required. The project in which to create the Uptime - check. The format is: + Required. The + `project `__ + in which to create the Uptime check. The format is: :: diff --git a/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py b/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py index 8a1e95f7..916fa344 100644 --- a/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py @@ -167,7 +167,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -182,7 +181,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -207,7 +205,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, @@ -222,7 +219,6 @@ def _prep_wrapped_messages(self, client_info): maximum=30.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), deadline=30.0, diff --git a/google/cloud/monitoring_v3/types/__init__.py b/google/cloud/monitoring_v3/types/__init__.py index 427f3a20..580e317a 100644 --- a/google/cloud/monitoring_v3/types/__init__.py +++ b/google/cloud/monitoring_v3/types/__init__.py @@ -27,7 +27,6 @@ TimeInterval, TypedValue, ComparisonType, - ServiceTier, ) from .dropped_labels import DroppedLabels from .group import Group @@ -144,7 +143,6 @@ "TimeInterval", "TypedValue", "ComparisonType", - "ServiceTier", "DroppedLabels", "Group", "CreateGroupRequest", diff --git a/google/cloud/monitoring_v3/types/alert.py b/google/cloud/monitoring_v3/types/alert.py index 74d67610..4c7b9989 100644 --- a/google/cloud/monitoring_v3/types/alert.py +++ b/google/cloud/monitoring_v3/types/alert.py @@ -120,6 +120,9 @@ class AlertPolicy(proto.Message): A read-only record of the most recent change to the alerting policy. 
If provided in a call to create or update, this field will be ignored. + alert_strategy (google.cloud.monitoring_v3.types.AlertPolicy.AlertStrategy): + Control over how this alert policy's + notification channels are notified. """ class ConditionCombinerType(proto.Enum): @@ -197,6 +200,10 @@ class Condition(proto.Message): condition_absent (google.cloud.monitoring_v3.types.AlertPolicy.Condition.MetricAbsence): A condition that checks that a time series continues to receive new data points. + condition_matched_log (google.cloud.monitoring_v3.types.AlertPolicy.Condition.LogMatch): + A condition that checks for log messages + matching given constraints. If set, no other + conditions can be present. condition_monitoring_query_language (google.cloud.monitoring_v3.types.AlertPolicy.Condition.MonitoringQueryLanguageCondition): A condition that uses the Monitoring Query Language to define alerts. @@ -388,6 +395,35 @@ class MetricAbsence(proto.Message): proto.MESSAGE, number=3, message="AlertPolicy.Condition.Trigger", ) + class LogMatch(proto.Message): + r"""A condition type that checks whether a log message in the `scoping + project `__ + satisfies the given filter. Logs from other projects in the metrics + scope are not evaluated. + + Attributes: + filter (str): + Required. A logs-based filter. See `Advanced Logs + Queries `__ for how + this filter should be constructed. + label_extractors (Sequence[google.cloud.monitoring_v3.types.AlertPolicy.Condition.LogMatch.LabelExtractorsEntry]): + Optional. A map from a label key to an extractor expression, + which is used to extract the value for this label key. Each + entry in this map is a specification for how data should be + extracted from log entries that match ``filter``. Each + combination of extracted values is treated as a separate + rule for the purposes of triggering notifications. Label + keys and corresponding values can be used in notifications + generated by this condition. + + Please see `the documentation on logs-based metric + ``valueExtractor``\ s `__ + for syntax and examples. + """ + + filter = proto.Field(proto.STRING, number=1,) + label_extractors = proto.MapField(proto.STRING, proto.STRING, number=2,) + class MonitoringQueryLanguageCondition(proto.Message): r"""A condition type that allows alert policies to be defined using `Monitoring Query @@ -442,6 +478,12 @@ class MonitoringQueryLanguageCondition(proto.Message): oneof="condition", message="AlertPolicy.Condition.MetricAbsence", ) + condition_matched_log = proto.Field( + proto.MESSAGE, + number=20, + oneof="condition", + message="AlertPolicy.Condition.LogMatch", + ) condition_monitoring_query_language = proto.Field( proto.MESSAGE, number=19, @@ -449,6 +491,37 @@ class MonitoringQueryLanguageCondition(proto.Message): message="AlertPolicy.Condition.MonitoringQueryLanguageCondition", ) + class AlertStrategy(proto.Message): + r"""Control over how the notification channels in + ``notification_channels`` are notified when this alert fires. + + Attributes: + notification_rate_limit (google.cloud.monitoring_v3.types.AlertPolicy.AlertStrategy.NotificationRateLimit): + Required for alert policies with a ``LogMatch`` condition. + + This limit is not implemented for alert policies that are + not log-based. + """ + + class NotificationRateLimit(proto.Message): + r"""Control over the rate of notifications sent to this alert + policy's notification channels. + + Attributes: + period (google.protobuf.duration_pb2.Duration): + Not more than one notification per ``period``. 
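Illustrative sketch (not itself part of this patch): the LogMatch condition and the AlertStrategy/NotificationRateLimit messages added above are the core of the logs-based alerts feature. Assuming a placeholder project, log filter, and label extractor, they might be combined when creating a policy roughly as follows:

    from google.cloud import monitoring_v3

    client = monitoring_v3.AlertPolicyServiceClient()

    policy = monitoring_v3.AlertPolicy(
        display_name="Payment errors (log-based)",
        combiner=monitoring_v3.AlertPolicy.ConditionCombinerType.OR,
        conditions=[
            monitoring_v3.AlertPolicy.Condition(
                display_name="Error log entries",
                # Placeholder filter and extractor, for illustration only.
                condition_matched_log=monitoring_v3.AlertPolicy.Condition.LogMatch(
                    filter='severity>=ERROR AND resource.type="gae_app"',
                    label_extractors={"code": "EXTRACT(jsonPayload.code)"},
                ),
            )
        ],
        # Required for policies with a LogMatch condition: rate-limit how often
        # the notification channels are re-notified.
        alert_strategy=monitoring_v3.AlertPolicy.AlertStrategy(
            notification_rate_limit=monitoring_v3.AlertPolicy.AlertStrategy.NotificationRateLimit(
                period={"seconds": 300},
            ),
        ),
    )

    created = client.create_alert_policy(
        name="projects/my-project",  # placeholder project
        alert_policy=policy,
    )
    print(created.name)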
+ """ + + period = proto.Field( + proto.MESSAGE, number=1, message=duration_pb2.Duration, + ) + + notification_rate_limit = proto.Field( + proto.MESSAGE, + number=1, + message="AlertPolicy.AlertStrategy.NotificationRateLimit", + ) + name = proto.Field(proto.STRING, number=1,) display_name = proto.Field(proto.STRING, number=2,) documentation = proto.Field(proto.MESSAGE, number=13, message=Documentation,) @@ -464,6 +537,7 @@ class MonitoringQueryLanguageCondition(proto.Message): mutation_record = proto.Field( proto.MESSAGE, number=11, message=gm_mutation_record.MutationRecord, ) + alert_strategy = proto.Field(proto.MESSAGE, number=21, message=AlertStrategy,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_v3/types/alert_service.py b/google/cloud/monitoring_v3/types/alert_service.py index 531ed7c9..7ee6b820 100644 --- a/google/cloud/monitoring_v3/types/alert_service.py +++ b/google/cloud/monitoring_v3/types/alert_service.py @@ -36,8 +36,9 @@ class CreateAlertPolicyRequest(proto.Message): r"""The protocol for the ``CreateAlertPolicy`` request. Attributes: name (str): - Required. The project in which to create the alerting - policy. The format is: + Required. The + `project `__ + in which to create the alerting policy. The format is: :: @@ -79,8 +80,9 @@ class ListAlertPoliciesRequest(proto.Message): r"""The protocol for the ``ListAlertPolicies`` request. Attributes: name (str): - Required. The project whose alert policies are to be listed. - The format is: + Required. The + `project `__ + whose alert policies are to be listed. The format is: :: diff --git a/google/cloud/monitoring_v3/types/common.py b/google/cloud/monitoring_v3/types/common.py index 363c393f..c5669f86 100644 --- a/google/cloud/monitoring_v3/types/common.py +++ b/google/cloud/monitoring_v3/types/common.py @@ -22,13 +22,7 @@ __protobuf__ = proto.module( package="google.monitoring.v3", - manifest={ - "ComparisonType", - "ServiceTier", - "TypedValue", - "TimeInterval", - "Aggregation", - }, + manifest={"ComparisonType", "TypedValue", "TimeInterval", "Aggregation",}, ) @@ -45,17 +39,6 @@ class ComparisonType(proto.Enum): COMPARISON_NE = 6 -class ServiceTier(proto.Enum): - r"""The tier of service for a Workspace. Please see the `service tiers - documentation `__ - for more details. - """ - _pb_options = {"deprecated": True} - SERVICE_TIER_UNSPECIFIED = 0 - SERVICE_TIER_BASIC = 1 - SERVICE_TIER_PREMIUM = 2 - - class TypedValue(proto.Message): r"""A single strongly-typed value. Attributes: @@ -88,7 +71,7 @@ class TimeInterval(proto.Message): r"""A closed time interval. It extends from the start time to the end time, and includes both: ``[startTime, endTime]``. Valid time intervals depend on the - ```MetricKind`` `__ + ```MetricKind`` `__ of the metric value. The end time must not be earlier than the start time. When writing data points, the start time must not be more than 25 hours in the past and the end time must not be more than five diff --git a/google/cloud/monitoring_v3/types/group_service.py b/google/cloud/monitoring_v3/types/group_service.py index ecd9241e..b450f470 100644 --- a/google/cloud/monitoring_v3/types/group_service.py +++ b/google/cloud/monitoring_v3/types/group_service.py @@ -39,8 +39,9 @@ class ListGroupsRequest(proto.Message): r"""The ``ListGroup`` request. Attributes: name (str): - Required. The project whose groups are to be listed. The - format is: + Required. The + `project `__ + whose groups are to be listed. 
The format is: :: @@ -134,8 +135,9 @@ class CreateGroupRequest(proto.Message): r"""The ``CreateGroup`` request. Attributes: name (str): - Required. The project in which to create the group. The - format is: + Required. The + `project `__ + in which to create the group. The format is: :: diff --git a/google/cloud/monitoring_v3/types/metric.py b/google/cloud/monitoring_v3/types/metric.py index 8c7d56ca..d364d76a 100644 --- a/google/cloud/monitoring_v3/types/metric.py +++ b/google/cloud/monitoring_v3/types/metric.py @@ -70,9 +70,11 @@ class TimeSeries(proto.Message): The associated metric. A fully-specified metric used to identify the time series. resource (google.api.monitored_resource_pb2.MonitoredResource): - The associated monitored resource. Custom - metrics can use only certain monitored resource - types in their time series data. + The associated monitored resource. Custom metrics can use + only certain monitored resource types in their time series + data. For more information, see `Monitored resources for + custom + metrics `__. metadata (google.api.monitored_resource_pb2.MonitoredResourceMetadata): Output only. The associated monitored resource metadata. When reading a time series, diff --git a/google/cloud/monitoring_v3/types/metric_service.py b/google/cloud/monitoring_v3/types/metric_service.py index 14d1005b..15cfc281 100644 --- a/google/cloud/monitoring_v3/types/metric_service.py +++ b/google/cloud/monitoring_v3/types/metric_service.py @@ -49,8 +49,9 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): r"""The ``ListMonitoredResourceDescriptors`` request. Attributes: name (str): - Required. The project on which to execute the request. The - format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -129,8 +130,9 @@ class ListMetricDescriptorsRequest(proto.Message): r"""The ``ListMetricDescriptors`` request. Attributes: name (str): - Required. The project on which to execute the request. The - format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -207,12 +209,10 @@ class CreateMetricDescriptorRequest(proto.Message): r"""The ``CreateMetricDescriptor`` request. Attributes: name (str): - Required. The project on which to execute the request. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] + Required. The + `project `__ + on which to execute the request. The format is: 4 + projects/[PROJECT_ID_OR_NUMBER] metric_descriptor (google.api.metric_pb2.MetricDescriptor): Required. The new `custom metric `__ @@ -247,8 +247,10 @@ class ListTimeSeriesRequest(proto.Message): r"""The ``ListTimeSeries`` request. Attributes: name (str): - Required. The project, organization or folder on which to - execute the request. The format is: + Required. The + `project `__, + organization or folder on which to execute the request. The + format is: :: @@ -362,8 +364,9 @@ class CreateTimeSeriesRequest(proto.Message): r"""The ``CreateTimeSeries`` request. Attributes: name (str): - Required. The project on which to execute the request. The - format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -438,8 +441,9 @@ class QueryTimeSeriesRequest(proto.Message): r"""The ``QueryTimeSeries`` request. Attributes: name (str): - Required. The project on which to execute the request. The - format is: + Required. The + `project `__ + on which to execute the request. 
The format is: :: diff --git a/google/cloud/monitoring_v3/types/notification.py b/google/cloud/monitoring_v3/types/notification.py index b1da8ada..1df93b17 100644 --- a/google/cloud/monitoring_v3/types/notification.py +++ b/google/cloud/monitoring_v3/types/notification.py @@ -17,7 +17,6 @@ from google.api import label_pb2 # type: ignore from google.api import launch_stage_pb2 # type: ignore -from google.cloud.monitoring_v3.types import common from google.cloud.monitoring_v3.types import mutation_record from google.protobuf import wrappers_pb2 # type: ignore @@ -64,9 +63,6 @@ class NotificationChannelDescriptor(proto.Message): corresponding type. Each label includes a description for how that field should be populated. - supported_tiers (Sequence[google.cloud.monitoring_v3.types.ServiceTier]): - The tiers that support this notification channel; the - project service tier must be one of the supported_tiers. launch_stage (google.api.launch_stage_pb2.LaunchStage): The product launch stage for channels of this type. @@ -79,9 +75,6 @@ class NotificationChannelDescriptor(proto.Message): labels = proto.RepeatedField( proto.MESSAGE, number=4, message=label_pb2.LabelDescriptor, ) - supported_tiers = proto.RepeatedField( - proto.ENUM, number=5, enum=common.ServiceTier, - ) launch_stage = proto.Field(proto.ENUM, number=7, enum=launch_stage_pb2.LaunchStage,) diff --git a/google/cloud/monitoring_v3/types/notification_service.py b/google/cloud/monitoring_v3/types/notification_service.py index 58886166..db736eb8 100644 --- a/google/cloud/monitoring_v3/types/notification_service.py +++ b/google/cloud/monitoring_v3/types/notification_service.py @@ -52,9 +52,10 @@ class ListNotificationChannelDescriptorsRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER] - Note that this names the parent container in which to look - for the descriptors; to retrieve a single descriptor by - name, use the + Note that this + `names `__ + the parent container in which to look for the descriptors; + to retrieve a single descriptor by name, use the [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] operation, instead. page_size (int): @@ -115,8 +116,9 @@ class CreateNotificationChannelRequest(proto.Message): r"""The ``CreateNotificationChannel`` request. Attributes: name (str): - Required. The project on which to execute the request. The - format is: + Required. The + `project `__ + on which to execute the request. The format is: :: @@ -143,8 +145,9 @@ class ListNotificationChannelsRequest(proto.Message): r"""The ``ListNotificationChannels`` request. Attributes: name (str): - Required. The project on which to execute the request. The - format is: + Required. The + `project `__ + on which to execute the request. The format is: :: diff --git a/google/cloud/monitoring_v3/types/service.py b/google/cloud/monitoring_v3/types/service.py index e6c71d51..13112cd7 100644 --- a/google/cloud/monitoring_v3/types/service.py +++ b/google/cloud/monitoring_v3/types/service.py @@ -71,6 +71,17 @@ class Service(proto.Message): telemetry (google.cloud.monitoring_v3.types.Service.Telemetry): Configuration for how to query telemetry on a Service. + user_labels (Sequence[google.cloud.monitoring_v3.types.Service.UserLabelsEntry]): + Labels which have been used to annotate the + service. Label keys must start with a letter. + Label keys and values may contain lowercase + letters, numbers, underscores, and dashes. 
Label + keys and values have a maximum length of 63 + characters, and must be less than 128 bytes in + size. Up to 64 label entries may be stored. For + labels which do not have a semantic value, the + empty string may be supplied for the label + value. """ class Custom(proto.Message): @@ -217,6 +228,7 @@ class Telemetry(proto.Message): proto.MESSAGE, number=11, oneof="identifier", message=IstioCanonicalService, ) telemetry = proto.Field(proto.MESSAGE, number=13, message=Telemetry,) + user_labels = proto.MapField(proto.STRING, proto.STRING, number=14,) class ServiceLevelObjective(proto.Message): @@ -254,6 +266,17 @@ class ServiceLevelObjective(proto.Message): A calendar period, semantically "since the start of the current ````". At this time, only ``DAY``, ``WEEK``, ``FORTNIGHT``, and ``MONTH`` are supported. + user_labels (Sequence[google.cloud.monitoring_v3.types.ServiceLevelObjective.UserLabelsEntry]): + Labels which have been used to annotate the + service-level objective. Label keys must start + with a letter. Label keys and values may contain + lowercase letters, numbers, underscores, and + dashes. Label keys and values have a maximum + length of 63 characters, and must be less than + 128 bytes in size. Up to 64 label entries may be + stored. For labels which do not have a semantic + value, the empty string may be supplied for the + label value. """ class View(proto.Enum): @@ -278,6 +301,7 @@ class View(proto.Enum): calendar_period = proto.Field( proto.ENUM, number=6, oneof="period", enum=calendar_period_pb2.CalendarPeriod, ) + user_labels = proto.MapField(proto.STRING, proto.STRING, number=12,) class ServiceLevelIndicator(proto.Message): @@ -384,11 +408,7 @@ class LatencyCriteria(proto.Message): class Range(proto.Message): - r"""Range of numerical values, inclusive of ``min`` and exclusive of - ``max``. If the open range "< range.max" is desired, set - ``range.min = -infinity``. If the open range ">= range.min" is - desired, set ``range.max = infinity``. - + r"""Range of numerical values within ``min`` and ``max``. Attributes: min_ (float): Range minimum. @@ -468,8 +488,8 @@ class DistributionCut(proto.Message): for measuring good service and total service. The ``TimeSeries`` must have ``ValueType = DISTRIBUTION`` and ``MetricKind = DELTA`` or ``MetricKind = CUMULATIVE``. The computed ``good_service`` will be - the count of values x in the ``Distribution`` such that - ``range.min <= x < range.max``. + the estimated count of values in the ``Distribution`` that fall + within the specified ``min`` and ``max``. Attributes: distribution_filter (str): @@ -536,9 +556,10 @@ class PerformanceThreshold(proto.Message): class MetricRange(proto.Message): r"""A ``MetricRange`` is used when each window is good when the value x - of a single ``TimeSeries`` satisfies ``range.min <= x < range.max``. - The provided ``TimeSeries`` must have ``ValueType = INT64`` or - ``ValueType = DOUBLE`` and ``MetricKind = GAUGE``. + of a single ``TimeSeries`` satisfies + ``range.min <= x <= range.max``. The provided ``TimeSeries`` must + have ``ValueType = INT64`` or ``ValueType = DOUBLE`` and + ``MetricKind = GAUGE``. Attributes: time_series (str): diff --git a/google/cloud/monitoring_v3/types/service_service.py b/google/cloud/monitoring_v3/types/service_service.py index 2b7db781..cd68e245 100644 --- a/google/cloud/monitoring_v3/types/service_service.py +++ b/google/cloud/monitoring_v3/types/service_service.py @@ -42,8 +42,9 @@ class CreateServiceRequest(proto.Message): r"""The ``CreateService`` request. 
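Several hunks above add a ``user_labels`` map to ``Service`` and ``ServiceLevelObjective``; a minimal sketch of attaching labels while creating a ``Service`` follows. The project ID, display name, and label values are illustrative placeholders only:

    from google.cloud import monitoring_v3

    client = monitoring_v3.ServiceMonitoringServiceClient()

    service = monitoring_v3.Service(
        display_name="checkout",
        # The new map field added in this change; keys and values must follow
        # the length and character rules quoted in the docstring above.
        user_labels={"team": "payments", "env": "prod"},
    )

    created = client.create_service(parent="projects/my-project", service=service)
    print(created.name, dict(created.user_labels))
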
Attributes: parent (str): - Required. Resource name of the parent workspace. The format - is: + Required. Resource + `name `__ + of the parent workspace. The format is: :: @@ -80,8 +81,9 @@ class ListServicesRequest(proto.Message): Attributes: parent (str): Required. Resource name of the parent containing the listed - services, either a project or a Monitoring Workspace. The - formats are: + services, either a + `project `__ + or a Monitoring Workspace. The formats are: :: diff --git a/google/cloud/monitoring_v3/types/span_context.py b/google/cloud/monitoring_v3/types/span_context.py index 5689065b..d33b3c24 100644 --- a/google/cloud/monitoring_v3/types/span_context.py +++ b/google/cloud/monitoring_v3/types/span_context.py @@ -20,8 +20,8 @@ class SpanContext(proto.Message): - r"""The context of a span, attached to - [Exemplars][google.api.Distribution.Exemplars] in + r"""The context of a span. This is attached to an + [Exemplar][google.api.Distribution.Exemplar] in [Distribution][google.api.Distribution] values during aggregation. It contains the name of a span with format: diff --git a/google/cloud/monitoring_v3/types/uptime.py b/google/cloud/monitoring_v3/types/uptime.py index 9a6fa92b..e3218bc2 100644 --- a/google/cloud/monitoring_v3/types/uptime.py +++ b/google/cloud/monitoring_v3/types/uptime.py @@ -134,9 +134,9 @@ class UptimeCheckConfig(proto.Message): The `monitored resource `__ associated with the configuration. The following monitored - resource types are supported for Uptime checks: - ``uptime_url``, ``gce_instance``, ``gae_app``, - ``aws_ec2_instance``, ``aws_elb_load_balancer`` + resource types are valid for this field: ``uptime_url``, + ``gce_instance``, ``gae_app``, ``aws_ec2_instance``, + ``aws_elb_load_balancer`` ``k8s_service`` resource_group (google.cloud.monitoring_v3.types.UptimeCheckConfig.ResourceGroup): The group resource associated with the configuration. diff --git a/google/cloud/monitoring_v3/types/uptime_service.py b/google/cloud/monitoring_v3/types/uptime_service.py index b7780564..5f2bc919 100644 --- a/google/cloud/monitoring_v3/types/uptime_service.py +++ b/google/cloud/monitoring_v3/types/uptime_service.py @@ -38,8 +38,9 @@ class ListUptimeCheckConfigsRequest(proto.Message): r"""The protocol for the ``ListUptimeCheckConfigs`` request. Attributes: parent (str): - Required. The project whose Uptime check configurations are - listed. The format is: + Required. The + `project `__ + whose Uptime check configurations are listed. The format is: :: @@ -110,8 +111,9 @@ class CreateUptimeCheckConfigRequest(proto.Message): r"""The protocol for the ``CreateUptimeCheckConfig`` request. Attributes: parent (str): - Required. The project in which to create the Uptime check. - The format is: + Required. The + `project `__ + in which to create the Uptime check. The format is: :: diff --git a/owl-bot-staging/v3/.coveragerc b/owl-bot-staging/v3/.coveragerc deleted file mode 100644 index 130673a5..00000000 --- a/owl-bot-staging/v3/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/monitoring/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v3/MANIFEST.in b/owl-bot-staging/v3/MANIFEST.in deleted file mode 100644 index cc42f1eb..00000000 --- a/owl-bot-staging/v3/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/monitoring *.py -recursive-include google/cloud/monitoring_v3 *.py diff --git a/owl-bot-staging/v3/README.rst b/owl-bot-staging/v3/README.rst deleted file mode 100644 index 1f076464..00000000 --- a/owl-bot-staging/v3/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Monitoring API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Monitoring API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v3/docs/conf.py b/owl-bot-staging/v3/docs/conf.py deleted file mode 100644 index 12adfd5b..00000000 --- a/owl-bot-staging/v3/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-monitoring documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-monitoring" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. 
-# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-monitoring-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-monitoring.tex", - u"google-cloud-monitoring Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-monitoring", - u"Google Cloud Monitoring Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-monitoring", - u"google-cloud-monitoring Documentation", - author, - "google-cloud-monitoring", - "GAPIC library for Google Cloud Monitoring API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v3/docs/index.rst b/owl-bot-staging/v3/docs/index.rst deleted file mode 100644 index d0a12177..00000000 --- a/owl-bot-staging/v3/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - monitoring_v3/services - monitoring_v3/types diff --git a/owl-bot-staging/v3/docs/monitoring_v3/alert_policy_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/alert_policy_service.rst deleted file mode 100644 index a11da806..00000000 --- a/owl-bot-staging/v3/docs/monitoring_v3/alert_policy_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AlertPolicyService ------------------------------------- - -.. automodule:: google.cloud.monitoring_v3.services.alert_policy_service - :members: - :inherited-members: - -.. automodule:: google.cloud.monitoring_v3.services.alert_policy_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/group_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/group_service.rst deleted file mode 100644 index 74703e5e..00000000 --- a/owl-bot-staging/v3/docs/monitoring_v3/group_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -GroupService ------------------------------- - -.. automodule:: google.cloud.monitoring_v3.services.group_service - :members: - :inherited-members: - -.. automodule:: google.cloud.monitoring_v3.services.group_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/metric_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/metric_service.rst deleted file mode 100644 index 74a1d3cf..00000000 --- a/owl-bot-staging/v3/docs/monitoring_v3/metric_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -MetricService -------------------------------- - -.. automodule:: google.cloud.monitoring_v3.services.metric_service - :members: - :inherited-members: - -.. automodule:: google.cloud.monitoring_v3.services.metric_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/notification_channel_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/notification_channel_service.rst deleted file mode 100644 index 85e28278..00000000 --- a/owl-bot-staging/v3/docs/monitoring_v3/notification_channel_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -NotificationChannelService --------------------------------------------- - -.. automodule:: google.cloud.monitoring_v3.services.notification_channel_service - :members: - :inherited-members: - -.. 
automodule:: google.cloud.monitoring_v3.services.notification_channel_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/query_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/query_service.rst deleted file mode 100644 index b144dc56..00000000 --- a/owl-bot-staging/v3/docs/monitoring_v3/query_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -QueryService ------------------------------- - -.. automodule:: google.cloud.monitoring_v3.services.query_service - :members: - :inherited-members: - -.. automodule:: google.cloud.monitoring_v3.services.query_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/service_monitoring_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/service_monitoring_service.rst deleted file mode 100644 index f7c35fa6..00000000 --- a/owl-bot-staging/v3/docs/monitoring_v3/service_monitoring_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ServiceMonitoringService ------------------------------------------- - -.. automodule:: google.cloud.monitoring_v3.services.service_monitoring_service - :members: - :inherited-members: - -.. automodule:: google.cloud.monitoring_v3.services.service_monitoring_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/services.rst b/owl-bot-staging/v3/docs/monitoring_v3/services.rst deleted file mode 100644 index 18a1eb15..00000000 --- a/owl-bot-staging/v3/docs/monitoring_v3/services.rst +++ /dev/null @@ -1,12 +0,0 @@ -Services for Google Cloud Monitoring v3 API -=========================================== -.. toctree:: - :maxdepth: 2 - - alert_policy_service - group_service - metric_service - notification_channel_service - query_service - service_monitoring_service - uptime_check_service diff --git a/owl-bot-staging/v3/docs/monitoring_v3/types.rst b/owl-bot-staging/v3/docs/monitoring_v3/types.rst deleted file mode 100644 index ed0eeeef..00000000 --- a/owl-bot-staging/v3/docs/monitoring_v3/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Monitoring v3 API -======================================== - -.. automodule:: google.cloud.monitoring_v3.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/v3/docs/monitoring_v3/uptime_check_service.rst b/owl-bot-staging/v3/docs/monitoring_v3/uptime_check_service.rst deleted file mode 100644 index 7149a771..00000000 --- a/owl-bot-staging/v3/docs/monitoring_v3/uptime_check_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -UptimeCheckService ------------------------------------- - -.. automodule:: google.cloud.monitoring_v3.services.uptime_check_service - :members: - :inherited-members: - -.. automodule:: google.cloud.monitoring_v3.services.uptime_check_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v3/google/cloud/monitoring/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring/__init__.py deleted file mode 100644 index 3bc62397..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring/__init__.py +++ /dev/null @@ -1,237 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from google.cloud.monitoring_v3.services.alert_policy_service.client import AlertPolicyServiceClient -from google.cloud.monitoring_v3.services.alert_policy_service.async_client import AlertPolicyServiceAsyncClient -from google.cloud.monitoring_v3.services.group_service.client import GroupServiceClient -from google.cloud.monitoring_v3.services.group_service.async_client import GroupServiceAsyncClient -from google.cloud.monitoring_v3.services.metric_service.client import MetricServiceClient -from google.cloud.monitoring_v3.services.metric_service.async_client import MetricServiceAsyncClient -from google.cloud.monitoring_v3.services.notification_channel_service.client import NotificationChannelServiceClient -from google.cloud.monitoring_v3.services.notification_channel_service.async_client import NotificationChannelServiceAsyncClient -from google.cloud.monitoring_v3.services.query_service.client import QueryServiceClient -from google.cloud.monitoring_v3.services.query_service.async_client import QueryServiceAsyncClient -from google.cloud.monitoring_v3.services.service_monitoring_service.client import ServiceMonitoringServiceClient -from google.cloud.monitoring_v3.services.service_monitoring_service.async_client import ServiceMonitoringServiceAsyncClient -from google.cloud.monitoring_v3.services.uptime_check_service.client import UptimeCheckServiceClient -from google.cloud.monitoring_v3.services.uptime_check_service.async_client import UptimeCheckServiceAsyncClient - -from google.cloud.monitoring_v3.types.alert import AlertPolicy -from google.cloud.monitoring_v3.types.alert_service import CreateAlertPolicyRequest -from google.cloud.monitoring_v3.types.alert_service import DeleteAlertPolicyRequest -from google.cloud.monitoring_v3.types.alert_service import GetAlertPolicyRequest -from google.cloud.monitoring_v3.types.alert_service import ListAlertPoliciesRequest -from google.cloud.monitoring_v3.types.alert_service import ListAlertPoliciesResponse -from google.cloud.monitoring_v3.types.alert_service import UpdateAlertPolicyRequest -from google.cloud.monitoring_v3.types.common import Aggregation -from google.cloud.monitoring_v3.types.common import TimeInterval -from google.cloud.monitoring_v3.types.common import TypedValue -from google.cloud.monitoring_v3.types.common import ComparisonType -from google.cloud.monitoring_v3.types.dropped_labels import DroppedLabels -from google.cloud.monitoring_v3.types.group import Group -from google.cloud.monitoring_v3.types.group_service import CreateGroupRequest -from google.cloud.monitoring_v3.types.group_service import DeleteGroupRequest -from google.cloud.monitoring_v3.types.group_service import GetGroupRequest -from google.cloud.monitoring_v3.types.group_service import ListGroupMembersRequest -from google.cloud.monitoring_v3.types.group_service import ListGroupMembersResponse -from google.cloud.monitoring_v3.types.group_service import ListGroupsRequest -from google.cloud.monitoring_v3.types.group_service import ListGroupsResponse -from google.cloud.monitoring_v3.types.group_service import UpdateGroupRequest -from 
google.cloud.monitoring_v3.types.metric import LabelValue -from google.cloud.monitoring_v3.types.metric import Point -from google.cloud.monitoring_v3.types.metric import QueryError -from google.cloud.monitoring_v3.types.metric import TextLocator -from google.cloud.monitoring_v3.types.metric import TimeSeries -from google.cloud.monitoring_v3.types.metric import TimeSeriesData -from google.cloud.monitoring_v3.types.metric import TimeSeriesDescriptor -from google.cloud.monitoring_v3.types.metric_service import CreateMetricDescriptorRequest -from google.cloud.monitoring_v3.types.metric_service import CreateTimeSeriesError -from google.cloud.monitoring_v3.types.metric_service import CreateTimeSeriesRequest -from google.cloud.monitoring_v3.types.metric_service import CreateTimeSeriesSummary -from google.cloud.monitoring_v3.types.metric_service import DeleteMetricDescriptorRequest -from google.cloud.monitoring_v3.types.metric_service import GetMetricDescriptorRequest -from google.cloud.monitoring_v3.types.metric_service import GetMonitoredResourceDescriptorRequest -from google.cloud.monitoring_v3.types.metric_service import ListMetricDescriptorsRequest -from google.cloud.monitoring_v3.types.metric_service import ListMetricDescriptorsResponse -from google.cloud.monitoring_v3.types.metric_service import ListMonitoredResourceDescriptorsRequest -from google.cloud.monitoring_v3.types.metric_service import ListMonitoredResourceDescriptorsResponse -from google.cloud.monitoring_v3.types.metric_service import ListTimeSeriesRequest -from google.cloud.monitoring_v3.types.metric_service import ListTimeSeriesResponse -from google.cloud.monitoring_v3.types.metric_service import QueryErrorList -from google.cloud.monitoring_v3.types.metric_service import QueryTimeSeriesRequest -from google.cloud.monitoring_v3.types.metric_service import QueryTimeSeriesResponse -from google.cloud.monitoring_v3.types.mutation_record import MutationRecord -from google.cloud.monitoring_v3.types.notification import NotificationChannel -from google.cloud.monitoring_v3.types.notification import NotificationChannelDescriptor -from google.cloud.monitoring_v3.types.notification_service import CreateNotificationChannelRequest -from google.cloud.monitoring_v3.types.notification_service import DeleteNotificationChannelRequest -from google.cloud.monitoring_v3.types.notification_service import GetNotificationChannelDescriptorRequest -from google.cloud.monitoring_v3.types.notification_service import GetNotificationChannelRequest -from google.cloud.monitoring_v3.types.notification_service import GetNotificationChannelVerificationCodeRequest -from google.cloud.monitoring_v3.types.notification_service import GetNotificationChannelVerificationCodeResponse -from google.cloud.monitoring_v3.types.notification_service import ListNotificationChannelDescriptorsRequest -from google.cloud.monitoring_v3.types.notification_service import ListNotificationChannelDescriptorsResponse -from google.cloud.monitoring_v3.types.notification_service import ListNotificationChannelsRequest -from google.cloud.monitoring_v3.types.notification_service import ListNotificationChannelsResponse -from google.cloud.monitoring_v3.types.notification_service import SendNotificationChannelVerificationCodeRequest -from google.cloud.monitoring_v3.types.notification_service import UpdateNotificationChannelRequest -from google.cloud.monitoring_v3.types.notification_service import VerifyNotificationChannelRequest -from google.cloud.monitoring_v3.types.service import BasicSli -from 
google.cloud.monitoring_v3.types.service import DistributionCut -from google.cloud.monitoring_v3.types.service import Range -from google.cloud.monitoring_v3.types.service import RequestBasedSli -from google.cloud.monitoring_v3.types.service import Service -from google.cloud.monitoring_v3.types.service import ServiceLevelIndicator -from google.cloud.monitoring_v3.types.service import ServiceLevelObjective -from google.cloud.monitoring_v3.types.service import TimeSeriesRatio -from google.cloud.monitoring_v3.types.service import WindowsBasedSli -from google.cloud.monitoring_v3.types.service_service import CreateServiceLevelObjectiveRequest -from google.cloud.monitoring_v3.types.service_service import CreateServiceRequest -from google.cloud.monitoring_v3.types.service_service import DeleteServiceLevelObjectiveRequest -from google.cloud.monitoring_v3.types.service_service import DeleteServiceRequest -from google.cloud.monitoring_v3.types.service_service import GetServiceLevelObjectiveRequest -from google.cloud.monitoring_v3.types.service_service import GetServiceRequest -from google.cloud.monitoring_v3.types.service_service import ListServiceLevelObjectivesRequest -from google.cloud.monitoring_v3.types.service_service import ListServiceLevelObjectivesResponse -from google.cloud.monitoring_v3.types.service_service import ListServicesRequest -from google.cloud.monitoring_v3.types.service_service import ListServicesResponse -from google.cloud.monitoring_v3.types.service_service import UpdateServiceLevelObjectiveRequest -from google.cloud.monitoring_v3.types.service_service import UpdateServiceRequest -from google.cloud.monitoring_v3.types.span_context import SpanContext -from google.cloud.monitoring_v3.types.uptime import InternalChecker -from google.cloud.monitoring_v3.types.uptime import UptimeCheckConfig -from google.cloud.monitoring_v3.types.uptime import UptimeCheckIp -from google.cloud.monitoring_v3.types.uptime import GroupResourceType -from google.cloud.monitoring_v3.types.uptime import UptimeCheckRegion -from google.cloud.monitoring_v3.types.uptime_service import CreateUptimeCheckConfigRequest -from google.cloud.monitoring_v3.types.uptime_service import DeleteUptimeCheckConfigRequest -from google.cloud.monitoring_v3.types.uptime_service import GetUptimeCheckConfigRequest -from google.cloud.monitoring_v3.types.uptime_service import ListUptimeCheckConfigsRequest -from google.cloud.monitoring_v3.types.uptime_service import ListUptimeCheckConfigsResponse -from google.cloud.monitoring_v3.types.uptime_service import ListUptimeCheckIpsRequest -from google.cloud.monitoring_v3.types.uptime_service import ListUptimeCheckIpsResponse -from google.cloud.monitoring_v3.types.uptime_service import UpdateUptimeCheckConfigRequest - -__all__ = ('AlertPolicyServiceClient', - 'AlertPolicyServiceAsyncClient', - 'GroupServiceClient', - 'GroupServiceAsyncClient', - 'MetricServiceClient', - 'MetricServiceAsyncClient', - 'NotificationChannelServiceClient', - 'NotificationChannelServiceAsyncClient', - 'QueryServiceClient', - 'QueryServiceAsyncClient', - 'ServiceMonitoringServiceClient', - 'ServiceMonitoringServiceAsyncClient', - 'UptimeCheckServiceClient', - 'UptimeCheckServiceAsyncClient', - 'AlertPolicy', - 'CreateAlertPolicyRequest', - 'DeleteAlertPolicyRequest', - 'GetAlertPolicyRequest', - 'ListAlertPoliciesRequest', - 'ListAlertPoliciesResponse', - 'UpdateAlertPolicyRequest', - 'Aggregation', - 'TimeInterval', - 'TypedValue', - 'ComparisonType', - 'DroppedLabels', - 'Group', - 'CreateGroupRequest', - 
'DeleteGroupRequest', - 'GetGroupRequest', - 'ListGroupMembersRequest', - 'ListGroupMembersResponse', - 'ListGroupsRequest', - 'ListGroupsResponse', - 'UpdateGroupRequest', - 'LabelValue', - 'Point', - 'QueryError', - 'TextLocator', - 'TimeSeries', - 'TimeSeriesData', - 'TimeSeriesDescriptor', - 'CreateMetricDescriptorRequest', - 'CreateTimeSeriesError', - 'CreateTimeSeriesRequest', - 'CreateTimeSeriesSummary', - 'DeleteMetricDescriptorRequest', - 'GetMetricDescriptorRequest', - 'GetMonitoredResourceDescriptorRequest', - 'ListMetricDescriptorsRequest', - 'ListMetricDescriptorsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'ListTimeSeriesRequest', - 'ListTimeSeriesResponse', - 'QueryErrorList', - 'QueryTimeSeriesRequest', - 'QueryTimeSeriesResponse', - 'MutationRecord', - 'NotificationChannel', - 'NotificationChannelDescriptor', - 'CreateNotificationChannelRequest', - 'DeleteNotificationChannelRequest', - 'GetNotificationChannelDescriptorRequest', - 'GetNotificationChannelRequest', - 'GetNotificationChannelVerificationCodeRequest', - 'GetNotificationChannelVerificationCodeResponse', - 'ListNotificationChannelDescriptorsRequest', - 'ListNotificationChannelDescriptorsResponse', - 'ListNotificationChannelsRequest', - 'ListNotificationChannelsResponse', - 'SendNotificationChannelVerificationCodeRequest', - 'UpdateNotificationChannelRequest', - 'VerifyNotificationChannelRequest', - 'BasicSli', - 'DistributionCut', - 'Range', - 'RequestBasedSli', - 'Service', - 'ServiceLevelIndicator', - 'ServiceLevelObjective', - 'TimeSeriesRatio', - 'WindowsBasedSli', - 'CreateServiceLevelObjectiveRequest', - 'CreateServiceRequest', - 'DeleteServiceLevelObjectiveRequest', - 'DeleteServiceRequest', - 'GetServiceLevelObjectiveRequest', - 'GetServiceRequest', - 'ListServiceLevelObjectivesRequest', - 'ListServiceLevelObjectivesResponse', - 'ListServicesRequest', - 'ListServicesResponse', - 'UpdateServiceLevelObjectiveRequest', - 'UpdateServiceRequest', - 'SpanContext', - 'InternalChecker', - 'UptimeCheckConfig', - 'UptimeCheckIp', - 'GroupResourceType', - 'UptimeCheckRegion', - 'CreateUptimeCheckConfigRequest', - 'DeleteUptimeCheckConfigRequest', - 'GetUptimeCheckConfigRequest', - 'ListUptimeCheckConfigsRequest', - 'ListUptimeCheckConfigsResponse', - 'ListUptimeCheckIpsRequest', - 'ListUptimeCheckIpsResponse', - 'UpdateUptimeCheckConfigRequest', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring/py.typed b/owl-bot-staging/v3/google/cloud/monitoring/py.typed deleted file mode 100644 index 55d895b0..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-monitoring package uses inline types. diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/__init__.py deleted file mode 100644 index 95f197d4..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/__init__.py +++ /dev/null @@ -1,238 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .services.alert_policy_service import AlertPolicyServiceClient -from .services.alert_policy_service import AlertPolicyServiceAsyncClient -from .services.group_service import GroupServiceClient -from .services.group_service import GroupServiceAsyncClient -from .services.metric_service import MetricServiceClient -from .services.metric_service import MetricServiceAsyncClient -from .services.notification_channel_service import NotificationChannelServiceClient -from .services.notification_channel_service import NotificationChannelServiceAsyncClient -from .services.query_service import QueryServiceClient -from .services.query_service import QueryServiceAsyncClient -from .services.service_monitoring_service import ServiceMonitoringServiceClient -from .services.service_monitoring_service import ServiceMonitoringServiceAsyncClient -from .services.uptime_check_service import UptimeCheckServiceClient -from .services.uptime_check_service import UptimeCheckServiceAsyncClient - -from .types.alert import AlertPolicy -from .types.alert_service import CreateAlertPolicyRequest -from .types.alert_service import DeleteAlertPolicyRequest -from .types.alert_service import GetAlertPolicyRequest -from .types.alert_service import ListAlertPoliciesRequest -from .types.alert_service import ListAlertPoliciesResponse -from .types.alert_service import UpdateAlertPolicyRequest -from .types.common import Aggregation -from .types.common import TimeInterval -from .types.common import TypedValue -from .types.common import ComparisonType -from .types.dropped_labels import DroppedLabels -from .types.group import Group -from .types.group_service import CreateGroupRequest -from .types.group_service import DeleteGroupRequest -from .types.group_service import GetGroupRequest -from .types.group_service import ListGroupMembersRequest -from .types.group_service import ListGroupMembersResponse -from .types.group_service import ListGroupsRequest -from .types.group_service import ListGroupsResponse -from .types.group_service import UpdateGroupRequest -from .types.metric import LabelValue -from .types.metric import Point -from .types.metric import QueryError -from .types.metric import TextLocator -from .types.metric import TimeSeries -from .types.metric import TimeSeriesData -from .types.metric import TimeSeriesDescriptor -from .types.metric_service import CreateMetricDescriptorRequest -from .types.metric_service import CreateTimeSeriesError -from .types.metric_service import CreateTimeSeriesRequest -from .types.metric_service import CreateTimeSeriesSummary -from .types.metric_service import DeleteMetricDescriptorRequest -from .types.metric_service import GetMetricDescriptorRequest -from .types.metric_service import GetMonitoredResourceDescriptorRequest -from .types.metric_service import ListMetricDescriptorsRequest -from .types.metric_service import ListMetricDescriptorsResponse -from .types.metric_service import ListMonitoredResourceDescriptorsRequest -from .types.metric_service import ListMonitoredResourceDescriptorsResponse -from .types.metric_service import ListTimeSeriesRequest -from .types.metric_service import ListTimeSeriesResponse -from .types.metric_service import QueryErrorList -from .types.metric_service import QueryTimeSeriesRequest -from .types.metric_service import QueryTimeSeriesResponse -from .types.mutation_record import MutationRecord -from .types.notification import NotificationChannel -from 
.types.notification import NotificationChannelDescriptor -from .types.notification_service import CreateNotificationChannelRequest -from .types.notification_service import DeleteNotificationChannelRequest -from .types.notification_service import GetNotificationChannelDescriptorRequest -from .types.notification_service import GetNotificationChannelRequest -from .types.notification_service import GetNotificationChannelVerificationCodeRequest -from .types.notification_service import GetNotificationChannelVerificationCodeResponse -from .types.notification_service import ListNotificationChannelDescriptorsRequest -from .types.notification_service import ListNotificationChannelDescriptorsResponse -from .types.notification_service import ListNotificationChannelsRequest -from .types.notification_service import ListNotificationChannelsResponse -from .types.notification_service import SendNotificationChannelVerificationCodeRequest -from .types.notification_service import UpdateNotificationChannelRequest -from .types.notification_service import VerifyNotificationChannelRequest -from .types.service import BasicSli -from .types.service import DistributionCut -from .types.service import Range -from .types.service import RequestBasedSli -from .types.service import Service -from .types.service import ServiceLevelIndicator -from .types.service import ServiceLevelObjective -from .types.service import TimeSeriesRatio -from .types.service import WindowsBasedSli -from .types.service_service import CreateServiceLevelObjectiveRequest -from .types.service_service import CreateServiceRequest -from .types.service_service import DeleteServiceLevelObjectiveRequest -from .types.service_service import DeleteServiceRequest -from .types.service_service import GetServiceLevelObjectiveRequest -from .types.service_service import GetServiceRequest -from .types.service_service import ListServiceLevelObjectivesRequest -from .types.service_service import ListServiceLevelObjectivesResponse -from .types.service_service import ListServicesRequest -from .types.service_service import ListServicesResponse -from .types.service_service import UpdateServiceLevelObjectiveRequest -from .types.service_service import UpdateServiceRequest -from .types.span_context import SpanContext -from .types.uptime import InternalChecker -from .types.uptime import UptimeCheckConfig -from .types.uptime import UptimeCheckIp -from .types.uptime import GroupResourceType -from .types.uptime import UptimeCheckRegion -from .types.uptime_service import CreateUptimeCheckConfigRequest -from .types.uptime_service import DeleteUptimeCheckConfigRequest -from .types.uptime_service import GetUptimeCheckConfigRequest -from .types.uptime_service import ListUptimeCheckConfigsRequest -from .types.uptime_service import ListUptimeCheckConfigsResponse -from .types.uptime_service import ListUptimeCheckIpsRequest -from .types.uptime_service import ListUptimeCheckIpsResponse -from .types.uptime_service import UpdateUptimeCheckConfigRequest - -__all__ = ( - 'AlertPolicyServiceAsyncClient', - 'GroupServiceAsyncClient', - 'MetricServiceAsyncClient', - 'NotificationChannelServiceAsyncClient', - 'QueryServiceAsyncClient', - 'ServiceMonitoringServiceAsyncClient', - 'UptimeCheckServiceAsyncClient', -'Aggregation', -'AlertPolicy', -'AlertPolicyServiceClient', -'BasicSli', -'ComparisonType', -'CreateAlertPolicyRequest', -'CreateGroupRequest', -'CreateMetricDescriptorRequest', -'CreateNotificationChannelRequest', -'CreateServiceLevelObjectiveRequest', -'CreateServiceRequest', 
-'CreateTimeSeriesError', -'CreateTimeSeriesRequest', -'CreateTimeSeriesSummary', -'CreateUptimeCheckConfigRequest', -'DeleteAlertPolicyRequest', -'DeleteGroupRequest', -'DeleteMetricDescriptorRequest', -'DeleteNotificationChannelRequest', -'DeleteServiceLevelObjectiveRequest', -'DeleteServiceRequest', -'DeleteUptimeCheckConfigRequest', -'DistributionCut', -'DroppedLabels', -'GetAlertPolicyRequest', -'GetGroupRequest', -'GetMetricDescriptorRequest', -'GetMonitoredResourceDescriptorRequest', -'GetNotificationChannelDescriptorRequest', -'GetNotificationChannelRequest', -'GetNotificationChannelVerificationCodeRequest', -'GetNotificationChannelVerificationCodeResponse', -'GetServiceLevelObjectiveRequest', -'GetServiceRequest', -'GetUptimeCheckConfigRequest', -'Group', -'GroupResourceType', -'GroupServiceClient', -'InternalChecker', -'LabelValue', -'ListAlertPoliciesRequest', -'ListAlertPoliciesResponse', -'ListGroupMembersRequest', -'ListGroupMembersResponse', -'ListGroupsRequest', -'ListGroupsResponse', -'ListMetricDescriptorsRequest', -'ListMetricDescriptorsResponse', -'ListMonitoredResourceDescriptorsRequest', -'ListMonitoredResourceDescriptorsResponse', -'ListNotificationChannelDescriptorsRequest', -'ListNotificationChannelDescriptorsResponse', -'ListNotificationChannelsRequest', -'ListNotificationChannelsResponse', -'ListServiceLevelObjectivesRequest', -'ListServiceLevelObjectivesResponse', -'ListServicesRequest', -'ListServicesResponse', -'ListTimeSeriesRequest', -'ListTimeSeriesResponse', -'ListUptimeCheckConfigsRequest', -'ListUptimeCheckConfigsResponse', -'ListUptimeCheckIpsRequest', -'ListUptimeCheckIpsResponse', -'MetricServiceClient', -'MutationRecord', -'NotificationChannel', -'NotificationChannelDescriptor', -'NotificationChannelServiceClient', -'Point', -'QueryError', -'QueryErrorList', -'QueryServiceClient', -'QueryTimeSeriesRequest', -'QueryTimeSeriesResponse', -'Range', -'RequestBasedSli', -'SendNotificationChannelVerificationCodeRequest', -'Service', -'ServiceLevelIndicator', -'ServiceLevelObjective', -'ServiceMonitoringServiceClient', -'SpanContext', -'TextLocator', -'TimeInterval', -'TimeSeries', -'TimeSeriesData', -'TimeSeriesDescriptor', -'TimeSeriesRatio', -'TypedValue', -'UpdateAlertPolicyRequest', -'UpdateGroupRequest', -'UpdateNotificationChannelRequest', -'UpdateServiceLevelObjectiveRequest', -'UpdateServiceRequest', -'UpdateUptimeCheckConfigRequest', -'UptimeCheckConfig', -'UptimeCheckIp', -'UptimeCheckRegion', -'UptimeCheckServiceClient', -'VerifyNotificationChannelRequest', -'WindowsBasedSli', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/gapic_metadata.json b/owl-bot-staging/v3/google/cloud/monitoring_v3/gapic_metadata.json deleted file mode 100644 index 0b3e214a..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/gapic_metadata.json +++ /dev/null @@ -1,567 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.monitoring_v3", - "protoPackage": "google.monitoring.v3", - "schema": "1.0", - "services": { - "AlertPolicyService": { - "clients": { - "grpc": { - "libraryClient": "AlertPolicyServiceClient", - "rpcs": { - "CreateAlertPolicy": { - "methods": [ - "create_alert_policy" - ] - }, - "DeleteAlertPolicy": { - "methods": [ - "delete_alert_policy" - ] - }, - "GetAlertPolicy": { - "methods": [ - "get_alert_policy" - ] - }, - "ListAlertPolicies": { - "methods": [ - "list_alert_policies" - ] - }, - "UpdateAlertPolicy": { - 
"methods": [ - "update_alert_policy" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AlertPolicyServiceAsyncClient", - "rpcs": { - "CreateAlertPolicy": { - "methods": [ - "create_alert_policy" - ] - }, - "DeleteAlertPolicy": { - "methods": [ - "delete_alert_policy" - ] - }, - "GetAlertPolicy": { - "methods": [ - "get_alert_policy" - ] - }, - "ListAlertPolicies": { - "methods": [ - "list_alert_policies" - ] - }, - "UpdateAlertPolicy": { - "methods": [ - "update_alert_policy" - ] - } - } - } - } - }, - "GroupService": { - "clients": { - "grpc": { - "libraryClient": "GroupServiceClient", - "rpcs": { - "CreateGroup": { - "methods": [ - "create_group" - ] - }, - "DeleteGroup": { - "methods": [ - "delete_group" - ] - }, - "GetGroup": { - "methods": [ - "get_group" - ] - }, - "ListGroupMembers": { - "methods": [ - "list_group_members" - ] - }, - "ListGroups": { - "methods": [ - "list_groups" - ] - }, - "UpdateGroup": { - "methods": [ - "update_group" - ] - } - } - }, - "grpc-async": { - "libraryClient": "GroupServiceAsyncClient", - "rpcs": { - "CreateGroup": { - "methods": [ - "create_group" - ] - }, - "DeleteGroup": { - "methods": [ - "delete_group" - ] - }, - "GetGroup": { - "methods": [ - "get_group" - ] - }, - "ListGroupMembers": { - "methods": [ - "list_group_members" - ] - }, - "ListGroups": { - "methods": [ - "list_groups" - ] - }, - "UpdateGroup": { - "methods": [ - "update_group" - ] - } - } - } - } - }, - "MetricService": { - "clients": { - "grpc": { - "libraryClient": "MetricServiceClient", - "rpcs": { - "CreateMetricDescriptor": { - "methods": [ - "create_metric_descriptor" - ] - }, - "CreateTimeSeries": { - "methods": [ - "create_time_series" - ] - }, - "DeleteMetricDescriptor": { - "methods": [ - "delete_metric_descriptor" - ] - }, - "GetMetricDescriptor": { - "methods": [ - "get_metric_descriptor" - ] - }, - "GetMonitoredResourceDescriptor": { - "methods": [ - "get_monitored_resource_descriptor" - ] - }, - "ListMetricDescriptors": { - "methods": [ - "list_metric_descriptors" - ] - }, - "ListMonitoredResourceDescriptors": { - "methods": [ - "list_monitored_resource_descriptors" - ] - }, - "ListTimeSeries": { - "methods": [ - "list_time_series" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MetricServiceAsyncClient", - "rpcs": { - "CreateMetricDescriptor": { - "methods": [ - "create_metric_descriptor" - ] - }, - "CreateTimeSeries": { - "methods": [ - "create_time_series" - ] - }, - "DeleteMetricDescriptor": { - "methods": [ - "delete_metric_descriptor" - ] - }, - "GetMetricDescriptor": { - "methods": [ - "get_metric_descriptor" - ] - }, - "GetMonitoredResourceDescriptor": { - "methods": [ - "get_monitored_resource_descriptor" - ] - }, - "ListMetricDescriptors": { - "methods": [ - "list_metric_descriptors" - ] - }, - "ListMonitoredResourceDescriptors": { - "methods": [ - "list_monitored_resource_descriptors" - ] - }, - "ListTimeSeries": { - "methods": [ - "list_time_series" - ] - } - } - } - } - }, - "NotificationChannelService": { - "clients": { - "grpc": { - "libraryClient": "NotificationChannelServiceClient", - "rpcs": { - "CreateNotificationChannel": { - "methods": [ - "create_notification_channel" - ] - }, - "DeleteNotificationChannel": { - "methods": [ - "delete_notification_channel" - ] - }, - "GetNotificationChannel": { - "methods": [ - "get_notification_channel" - ] - }, - "GetNotificationChannelDescriptor": { - "methods": [ - "get_notification_channel_descriptor" - ] - }, - "GetNotificationChannelVerificationCode": { - "methods": [ - 
"get_notification_channel_verification_code" - ] - }, - "ListNotificationChannelDescriptors": { - "methods": [ - "list_notification_channel_descriptors" - ] - }, - "ListNotificationChannels": { - "methods": [ - "list_notification_channels" - ] - }, - "SendNotificationChannelVerificationCode": { - "methods": [ - "send_notification_channel_verification_code" - ] - }, - "UpdateNotificationChannel": { - "methods": [ - "update_notification_channel" - ] - }, - "VerifyNotificationChannel": { - "methods": [ - "verify_notification_channel" - ] - } - } - }, - "grpc-async": { - "libraryClient": "NotificationChannelServiceAsyncClient", - "rpcs": { - "CreateNotificationChannel": { - "methods": [ - "create_notification_channel" - ] - }, - "DeleteNotificationChannel": { - "methods": [ - "delete_notification_channel" - ] - }, - "GetNotificationChannel": { - "methods": [ - "get_notification_channel" - ] - }, - "GetNotificationChannelDescriptor": { - "methods": [ - "get_notification_channel_descriptor" - ] - }, - "GetNotificationChannelVerificationCode": { - "methods": [ - "get_notification_channel_verification_code" - ] - }, - "ListNotificationChannelDescriptors": { - "methods": [ - "list_notification_channel_descriptors" - ] - }, - "ListNotificationChannels": { - "methods": [ - "list_notification_channels" - ] - }, - "SendNotificationChannelVerificationCode": { - "methods": [ - "send_notification_channel_verification_code" - ] - }, - "UpdateNotificationChannel": { - "methods": [ - "update_notification_channel" - ] - }, - "VerifyNotificationChannel": { - "methods": [ - "verify_notification_channel" - ] - } - } - } - } - }, - "QueryService": { - "clients": { - "grpc": { - "libraryClient": "QueryServiceClient", - "rpcs": { - "QueryTimeSeries": { - "methods": [ - "query_time_series" - ] - } - } - }, - "grpc-async": { - "libraryClient": "QueryServiceAsyncClient", - "rpcs": { - "QueryTimeSeries": { - "methods": [ - "query_time_series" - ] - } - } - } - } - }, - "ServiceMonitoringService": { - "clients": { - "grpc": { - "libraryClient": "ServiceMonitoringServiceClient", - "rpcs": { - "CreateService": { - "methods": [ - "create_service" - ] - }, - "CreateServiceLevelObjective": { - "methods": [ - "create_service_level_objective" - ] - }, - "DeleteService": { - "methods": [ - "delete_service" - ] - }, - "DeleteServiceLevelObjective": { - "methods": [ - "delete_service_level_objective" - ] - }, - "GetService": { - "methods": [ - "get_service" - ] - }, - "GetServiceLevelObjective": { - "methods": [ - "get_service_level_objective" - ] - }, - "ListServiceLevelObjectives": { - "methods": [ - "list_service_level_objectives" - ] - }, - "ListServices": { - "methods": [ - "list_services" - ] - }, - "UpdateService": { - "methods": [ - "update_service" - ] - }, - "UpdateServiceLevelObjective": { - "methods": [ - "update_service_level_objective" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ServiceMonitoringServiceAsyncClient", - "rpcs": { - "CreateService": { - "methods": [ - "create_service" - ] - }, - "CreateServiceLevelObjective": { - "methods": [ - "create_service_level_objective" - ] - }, - "DeleteService": { - "methods": [ - "delete_service" - ] - }, - "DeleteServiceLevelObjective": { - "methods": [ - "delete_service_level_objective" - ] - }, - "GetService": { - "methods": [ - "get_service" - ] - }, - "GetServiceLevelObjective": { - "methods": [ - "get_service_level_objective" - ] - }, - "ListServiceLevelObjectives": { - "methods": [ - "list_service_level_objectives" - ] - }, - "ListServices": { - "methods": [ 
- "list_services" - ] - }, - "UpdateService": { - "methods": [ - "update_service" - ] - }, - "UpdateServiceLevelObjective": { - "methods": [ - "update_service_level_objective" - ] - } - } - } - } - }, - "UptimeCheckService": { - "clients": { - "grpc": { - "libraryClient": "UptimeCheckServiceClient", - "rpcs": { - "CreateUptimeCheckConfig": { - "methods": [ - "create_uptime_check_config" - ] - }, - "DeleteUptimeCheckConfig": { - "methods": [ - "delete_uptime_check_config" - ] - }, - "GetUptimeCheckConfig": { - "methods": [ - "get_uptime_check_config" - ] - }, - "ListUptimeCheckConfigs": { - "methods": [ - "list_uptime_check_configs" - ] - }, - "ListUptimeCheckIps": { - "methods": [ - "list_uptime_check_ips" - ] - }, - "UpdateUptimeCheckConfig": { - "methods": [ - "update_uptime_check_config" - ] - } - } - }, - "grpc-async": { - "libraryClient": "UptimeCheckServiceAsyncClient", - "rpcs": { - "CreateUptimeCheckConfig": { - "methods": [ - "create_uptime_check_config" - ] - }, - "DeleteUptimeCheckConfig": { - "methods": [ - "delete_uptime_check_config" - ] - }, - "GetUptimeCheckConfig": { - "methods": [ - "get_uptime_check_config" - ] - }, - "ListUptimeCheckConfigs": { - "methods": [ - "list_uptime_check_configs" - ] - }, - "ListUptimeCheckIps": { - "methods": [ - "list_uptime_check_ips" - ] - }, - "UpdateUptimeCheckConfig": { - "methods": [ - "update_uptime_check_config" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/py.typed b/owl-bot-staging/v3/google/cloud/monitoring_v3/py.typed deleted file mode 100644 index 55d895b0..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-monitoring package uses inline types. diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/__init__.py deleted file mode 100644 index 4de65971..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/__init__.py deleted file mode 100644 index a66d4d58..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AlertPolicyServiceClient -from .async_client import AlertPolicyServiceAsyncClient - -__all__ = ( - 'AlertPolicyServiceClient', - 'AlertPolicyServiceAsyncClient', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py deleted file mode 100644 index 7e36ea78..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py +++ /dev/null @@ -1,673 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.services.alert_policy_service import pagers -from google.cloud.monitoring_v3.types import alert -from google.cloud.monitoring_v3.types import alert_service -from google.cloud.monitoring_v3.types import mutation_record -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import AlertPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AlertPolicyServiceGrpcAsyncIOTransport -from .client import AlertPolicyServiceClient - - -class AlertPolicyServiceAsyncClient: - """The AlertPolicyService API is used to manage (list, create, delete, - edit) alert policies in Stackdriver Monitoring. An alerting policy - is a description of the conditions under which some aspect of your - system is considered to be "unhealthy" and the ways to notify people - or services about this state. In addition to using this API, alert - policies can also be managed through `Stackdriver - Monitoring `__, which can - be reached by clicking the "Monitoring" tab in `Cloud - Console `__. 
- """ - - _client: AlertPolicyServiceClient - - DEFAULT_ENDPOINT = AlertPolicyServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AlertPolicyServiceClient.DEFAULT_MTLS_ENDPOINT - - alert_policy_path = staticmethod(AlertPolicyServiceClient.alert_policy_path) - parse_alert_policy_path = staticmethod(AlertPolicyServiceClient.parse_alert_policy_path) - alert_policy_condition_path = staticmethod(AlertPolicyServiceClient.alert_policy_condition_path) - parse_alert_policy_condition_path = staticmethod(AlertPolicyServiceClient.parse_alert_policy_condition_path) - common_billing_account_path = staticmethod(AlertPolicyServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AlertPolicyServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AlertPolicyServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AlertPolicyServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AlertPolicyServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AlertPolicyServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AlertPolicyServiceClient.common_project_path) - parse_common_project_path = staticmethod(AlertPolicyServiceClient.parse_common_project_path) - common_location_path = staticmethod(AlertPolicyServiceClient.common_location_path) - parse_common_location_path = staticmethod(AlertPolicyServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AlertPolicyServiceAsyncClient: The constructed client. - """ - return AlertPolicyServiceClient.from_service_account_info.__func__(AlertPolicyServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AlertPolicyServiceAsyncClient: The constructed client. - """ - return AlertPolicyServiceClient.from_service_account_file.__func__(AlertPolicyServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> AlertPolicyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AlertPolicyServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(AlertPolicyServiceClient).get_transport_class, type(AlertPolicyServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, AlertPolicyServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the alert policy service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.AlertPolicyServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = AlertPolicyServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_alert_policies(self, - request: alert_service.ListAlertPoliciesRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAlertPoliciesAsyncPager: - r"""Lists the existing alerting policies for the - workspace. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListAlertPoliciesRequest`): - The request object. The protocol for the - `ListAlertPolicies` request. - name (:class:`str`): - Required. The - `project `__ - whose alert policies are to be listed. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - Note that this field names the parent container in which - the alerting policies to be listed are stored. To - retrieve a single alerting policy by name, use the - [GetAlertPolicy][google.monitoring.v3.AlertPolicyService.GetAlertPolicy] - operation, instead. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.alert_policy_service.pagers.ListAlertPoliciesAsyncPager: - The protocol for the ListAlertPolicies response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = alert_service.ListAlertPoliciesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_alert_policies, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAlertPoliciesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_alert_policy(self, - request: alert_service.GetAlertPolicyRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> alert.AlertPolicy: - r"""Gets a single alerting policy. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetAlertPolicyRequest`): - The request object. The protocol for the - `GetAlertPolicy` request. - name (:class:`str`): - Required. The alerting policy to retrieve. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.AlertPolicy: - A description of the conditions under which some aspect of your system is - considered to be "unhealthy" and the ways to notify - people or services about this state. For an overview - of alert policies, see [Introduction to - Alerting](\ https://cloud.google.com/monitoring/alerts/). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = alert_service.GetAlertPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_alert_policy, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_alert_policy(self, - request: alert_service.CreateAlertPolicyRequest = None, - *, - name: str = None, - alert_policy: alert.AlertPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> alert.AlertPolicy: - r"""Creates a new alerting policy. - - Args: - request (:class:`google.cloud.monitoring_v3.types.CreateAlertPolicyRequest`): - The request object. The protocol for the - `CreateAlertPolicy` request. - name (:class:`str`): - Required. The - `project `__ - in which to create the alerting policy. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - Note that this field names the parent container in which - the alerting policy will be written, not the name of the - created policy. \|name\| must be a host project of a - workspace, otherwise INVALID_ARGUMENT error will return. - The alerting policy that is returned will have a name - that contains a normalized representation of this name - as a prefix but adds a suffix of the form - ``/alertPolicies/[ALERT_POLICY_ID]``, identifying the - policy in the container. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - alert_policy (:class:`google.cloud.monitoring_v3.types.AlertPolicy`): - Required. The requested alerting policy. You should omit - the ``name`` field in this policy. The name will be - returned in the new policy, including a new - ``[ALERT_POLICY_ID]`` value. - - This corresponds to the ``alert_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.AlertPolicy: - A description of the conditions under which some aspect of your system is - considered to be "unhealthy" and the ways to notify - people or services about this state. For an overview - of alert policies, see [Introduction to - Alerting](\ https://cloud.google.com/monitoring/alerts/). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, alert_policy]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = alert_service.CreateAlertPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if alert_policy is not None: - request.alert_policy = alert_policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_alert_policy, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_alert_policy(self, - request: alert_service.DeleteAlertPolicyRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an alerting policy. - - Args: - request (:class:`google.cloud.monitoring_v3.types.DeleteAlertPolicyRequest`): - The request object. The protocol for the - `DeleteAlertPolicy` request. - name (:class:`str`): - Required. The alerting policy to delete. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] - - For more information, see - [AlertPolicy][google.monitoring.v3.AlertPolicy]. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = alert_service.DeleteAlertPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_alert_policy, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_alert_policy(self, - request: alert_service.UpdateAlertPolicyRequest = None, - *, - update_mask: field_mask_pb2.FieldMask = None, - alert_policy: alert.AlertPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> alert.AlertPolicy: - r"""Updates an alerting policy. 
You can either replace the entire - policy with a new one or replace only certain fields in the - current alerting policy by specifying the fields to be updated - via ``updateMask``. Returns the updated alerting policy. - - Args: - request (:class:`google.cloud.monitoring_v3.types.UpdateAlertPolicyRequest`): - The request object. The protocol for the - `UpdateAlertPolicy` request. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. A list of alerting policy field names. If this - field is not empty, each listed field in the existing - alerting policy is set to the value of the corresponding - field in the supplied policy (``alert_policy``), or to - the field's default value if the field is not in the - supplied alerting policy. Fields not listed retain their - previous value. - - Examples of valid field masks include ``display_name``, - ``documentation``, ``documentation.content``, - ``documentation.mime_type``, ``user_labels``, - ``user_label.nameofkey``, ``enabled``, ``conditions``, - ``combiner``, etc. - - If this field is empty, then the supplied alerting - policy replaces the existing policy. It is the same as - deleting the existing policy and adding the supplied - policy, except for the following: - - - The new policy will have the same - ``[ALERT_POLICY_ID]`` as the former policy. This - gives you continuity with the former policy in your - notifications and incidents. - - Conditions in the new policy will keep their former - ``[CONDITION_ID]`` if the supplied condition includes - the ``name`` field with that ``[CONDITION_ID]``. If - the supplied condition omits the ``name`` field, then - a new ``[CONDITION_ID]`` is created. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - alert_policy (:class:`google.cloud.monitoring_v3.types.AlertPolicy`): - Required. The updated alerting policy or the updated - values for the fields listed in ``update_mask``. If - ``update_mask`` is not empty, any fields in this policy - that are not in ``update_mask`` are ignored. - - This corresponds to the ``alert_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.AlertPolicy: - A description of the conditions under which some aspect of your system is - considered to be "unhealthy" and the ways to notify - people or services about this state. For an overview - of alert policies, see [Introduction to - Alerting](\ https://cloud.google.com/monitoring/alerts/). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([update_mask, alert_policy]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = alert_service.UpdateAlertPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if update_mask is not None: - request.update_mask = update_mask - if alert_policy is not None: - request.alert_policy = alert_policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_alert_policy, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("alert_policy.name", request.alert_policy.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "AlertPolicyServiceAsyncClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/client.py deleted file mode 100644 index b9aee512..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/client.py +++ /dev/null @@ -1,855 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.services.alert_policy_service import pagers -from google.cloud.monitoring_v3.types import alert -from google.cloud.monitoring_v3.types import alert_service -from google.cloud.monitoring_v3.types import mutation_record -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import AlertPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AlertPolicyServiceGrpcTransport -from .transports.grpc_asyncio import AlertPolicyServiceGrpcAsyncIOTransport - - -class AlertPolicyServiceClientMeta(type): - """Metaclass for the AlertPolicyService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AlertPolicyServiceTransport]] - _transport_registry["grpc"] = AlertPolicyServiceGrpcTransport - _transport_registry["grpc_asyncio"] = AlertPolicyServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[AlertPolicyServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class AlertPolicyServiceClient(metaclass=AlertPolicyServiceClientMeta): - """The AlertPolicyService API is used to manage (list, create, delete, - edit) alert policies in Stackdriver Monitoring. An alerting policy - is a description of the conditions under which some aspect of your - system is considered to be "unhealthy" and the ways to notify people - or services about this state. In addition to using this API, alert - policies can also be managed through `Stackdriver - Monitoring `__, which can - be reached by clicking the "Monitoring" tab in `Cloud - Console `__. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "monitoring.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AlertPolicyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AlertPolicyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> AlertPolicyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AlertPolicyServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def alert_policy_path(project: str,alert_policy: str,) -> str: - """Returns a fully-qualified alert_policy string.""" - return "projects/{project}/alertPolicies/{alert_policy}".format(project=project, alert_policy=alert_policy, ) - - @staticmethod - def parse_alert_policy_path(path: str) -> Dict[str,str]: - """Parses a alert_policy path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/alertPolicies/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def alert_policy_condition_path(project: str,alert_policy: str,condition: str,) -> str: - """Returns a fully-qualified alert_policy_condition string.""" - return "projects/{project}/alertPolicies/{alert_policy}/conditions/{condition}".format(project=project, alert_policy=alert_policy, condition=condition, ) - - @staticmethod - def parse_alert_policy_condition_path(path: str) -> Dict[str,str]: - """Parses a alert_policy_condition path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/alertPolicies/(?P.+?)/conditions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, AlertPolicyServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo 
= DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the alert policy service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, AlertPolicyServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. 
- if isinstance(transport, AlertPolicyServiceTransport): - # transport is a AlertPolicyServiceTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def list_alert_policies(self, - request: alert_service.ListAlertPoliciesRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAlertPoliciesPager: - r"""Lists the existing alerting policies for the - workspace. - - Args: - request (google.cloud.monitoring_v3.types.ListAlertPoliciesRequest): - The request object. The protocol for the - `ListAlertPolicies` request. - name (str): - Required. The - `project `__ - whose alert policies are to be listed. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - Note that this field names the parent container in which - the alerting policies to be listed are stored. To - retrieve a single alerting policy by name, use the - [GetAlertPolicy][google.monitoring.v3.AlertPolicyService.GetAlertPolicy] - operation, instead. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.alert_policy_service.pagers.ListAlertPoliciesPager: - The protocol for the ListAlertPolicies response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a alert_service.ListAlertPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, alert_service.ListAlertPoliciesRequest): - request = alert_service.ListAlertPoliciesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_alert_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAlertPoliciesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_alert_policy(self, - request: alert_service.GetAlertPolicyRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> alert.AlertPolicy: - r"""Gets a single alerting policy. - - Args: - request (google.cloud.monitoring_v3.types.GetAlertPolicyRequest): - The request object. The protocol for the - `GetAlertPolicy` request. - name (str): - Required. The alerting policy to retrieve. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.AlertPolicy: - A description of the conditions under which some aspect of your system is - considered to be "unhealthy" and the ways to notify - people or services about this state. For an overview - of alert policies, see [Introduction to - Alerting](\ https://cloud.google.com/monitoring/alerts/). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a alert_service.GetAlertPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, alert_service.GetAlertPolicyRequest): - request = alert_service.GetAlertPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_alert_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def create_alert_policy(self, - request: alert_service.CreateAlertPolicyRequest = None, - *, - name: str = None, - alert_policy: alert.AlertPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> alert.AlertPolicy: - r"""Creates a new alerting policy. - - Args: - request (google.cloud.monitoring_v3.types.CreateAlertPolicyRequest): - The request object. The protocol for the - `CreateAlertPolicy` request. - name (str): - Required. The - `project `__ - in which to create the alerting policy. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - Note that this field names the parent container in which - the alerting policy will be written, not the name of the - created policy. \|name\| must be a host project of a - workspace, otherwise INVALID_ARGUMENT error will return. - The alerting policy that is returned will have a name - that contains a normalized representation of this name - as a prefix but adds a suffix of the form - ``/alertPolicies/[ALERT_POLICY_ID]``, identifying the - policy in the container. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - alert_policy (google.cloud.monitoring_v3.types.AlertPolicy): - Required. The requested alerting policy. You should omit - the ``name`` field in this policy. The name will be - returned in the new policy, including a new - ``[ALERT_POLICY_ID]`` value. - - This corresponds to the ``alert_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.AlertPolicy: - A description of the conditions under which some aspect of your system is - considered to be "unhealthy" and the ways to notify - people or services about this state. For an overview - of alert policies, see [Introduction to - Alerting](\ https://cloud.google.com/monitoring/alerts/). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, alert_policy]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a alert_service.CreateAlertPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, alert_service.CreateAlertPolicyRequest): - request = alert_service.CreateAlertPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if alert_policy is not None: - request.alert_policy = alert_policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_alert_policy] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_alert_policy(self, - request: alert_service.DeleteAlertPolicyRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an alerting policy. - - Args: - request (google.cloud.monitoring_v3.types.DeleteAlertPolicyRequest): - The request object. The protocol for the - `DeleteAlertPolicy` request. - name (str): - Required. The alerting policy to delete. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] - - For more information, see - [AlertPolicy][google.monitoring.v3.AlertPolicy]. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a alert_service.DeleteAlertPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, alert_service.DeleteAlertPolicyRequest): - request = alert_service.DeleteAlertPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_alert_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_alert_policy(self, - request: alert_service.UpdateAlertPolicyRequest = None, - *, - update_mask: field_mask_pb2.FieldMask = None, - alert_policy: alert.AlertPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> alert.AlertPolicy: - r"""Updates an alerting policy. You can either replace the entire - policy with a new one or replace only certain fields in the - current alerting policy by specifying the fields to be updated - via ``updateMask``. Returns the updated alerting policy. - - Args: - request (google.cloud.monitoring_v3.types.UpdateAlertPolicyRequest): - The request object. The protocol for the - `UpdateAlertPolicy` request. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. A list of alerting policy field names. 
If this - field is not empty, each listed field in the existing - alerting policy is set to the value of the corresponding - field in the supplied policy (``alert_policy``), or to - the field's default value if the field is not in the - supplied alerting policy. Fields not listed retain their - previous value. - - Examples of valid field masks include ``display_name``, - ``documentation``, ``documentation.content``, - ``documentation.mime_type``, ``user_labels``, - ``user_label.nameofkey``, ``enabled``, ``conditions``, - ``combiner``, etc. - - If this field is empty, then the supplied alerting - policy replaces the existing policy. It is the same as - deleting the existing policy and adding the supplied - policy, except for the following: - - - The new policy will have the same - ``[ALERT_POLICY_ID]`` as the former policy. This - gives you continuity with the former policy in your - notifications and incidents. - - Conditions in the new policy will keep their former - ``[CONDITION_ID]`` if the supplied condition includes - the ``name`` field with that ``[CONDITION_ID]``. If - the supplied condition omits the ``name`` field, then - a new ``[CONDITION_ID]`` is created. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - alert_policy (google.cloud.monitoring_v3.types.AlertPolicy): - Required. The updated alerting policy or the updated - values for the fields listed in ``update_mask``. If - ``update_mask`` is not empty, any fields in this policy - that are not in ``update_mask`` are ignored. - - This corresponds to the ``alert_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.AlertPolicy: - A description of the conditions under which some aspect of your system is - considered to be "unhealthy" and the ways to notify - people or services about this state. For an overview - of alert policies, see [Introduction to - Alerting](\ https://cloud.google.com/monitoring/alerts/). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([update_mask, alert_policy]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a alert_service.UpdateAlertPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, alert_service.UpdateAlertPolicyRequest): - request = alert_service.UpdateAlertPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if update_mask is not None: - request.update_mask = update_mask - if alert_policy is not None: - request.alert_policy = alert_policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_alert_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("alert_policy.name", request.alert_policy.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "AlertPolicyServiceClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/pagers.py deleted file mode 100644 index adcc60ac..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/pagers.py +++ /dev/null @@ -1,141 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.monitoring_v3.types import alert -from google.cloud.monitoring_v3.types import alert_service - - -class ListAlertPoliciesPager: - """A pager for iterating through ``list_alert_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListAlertPoliciesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``alert_policies`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAlertPolicies`` requests and continue to iterate - through the ``alert_policies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListAlertPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., alert_service.ListAlertPoliciesResponse], - request: alert_service.ListAlertPoliciesRequest, - response: alert_service.ListAlertPoliciesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListAlertPoliciesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListAlertPoliciesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
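As a rough sketch of the partial-update behaviour described for ``update_alert_policy`` above (the project and policy IDs are placeholders), only the fields named in ``update_mask`` are overwritten::

    from google.cloud import monitoring_v3
    from google.protobuf import field_mask_pb2

    client = monitoring_v3.AlertPolicyServiceClient()
    policy = client.get_alert_policy(
        name="projects/my-project/alertPolicies/1234"  # placeholder resource name
    )
    policy.display_name = "Renamed policy"
    updated = client.update_alert_policy(
        alert_policy=policy,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )

Leaving ``update_mask`` unset replaces the existing policy wholesale, as noted in the docstring above.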
- """ - self._method = method - self._request = alert_service.ListAlertPoliciesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[alert_service.ListAlertPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[alert.AlertPolicy]: - for page in self.pages: - yield from page.alert_policies - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAlertPoliciesAsyncPager: - """A pager for iterating through ``list_alert_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListAlertPoliciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``alert_policies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAlertPolicies`` requests and continue to iterate - through the ``alert_policies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListAlertPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[alert_service.ListAlertPoliciesResponse]], - request: alert_service.ListAlertPoliciesRequest, - response: alert_service.ListAlertPoliciesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListAlertPoliciesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListAlertPoliciesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = alert_service.ListAlertPoliciesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[alert_service.ListAlertPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[alert.AlertPolicy]: - async def async_generator(): - async for page in self.pages: - for response in page.alert_policies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/__init__.py deleted file mode 100644 index 6babe610..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AlertPolicyServiceTransport -from .grpc import AlertPolicyServiceGrpcTransport -from .grpc_asyncio import AlertPolicyServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AlertPolicyServiceTransport]] -_transport_registry['grpc'] = AlertPolicyServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AlertPolicyServiceGrpcAsyncIOTransport - -__all__ = ( - 'AlertPolicyServiceTransport', - 'AlertPolicyServiceGrpcTransport', - 'AlertPolicyServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py deleted file mode 100644 index db4d23af..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py +++ /dev/null @@ -1,246 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.types import alert -from google.cloud.monitoring_v3.types import alert_service -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-monitoring', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class AlertPolicyServiceTransport(abc.ABC): - """Abstract transport class for AlertPolicyService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ) - - DEFAULT_HOST: str = 'monitoring.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
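As a sketch of the credential resolution described above (the key-file path is hypothetical), callers can either point the client at a service-account key or fall back to Application Default Credentials::

    from google.cloud import monitoring_v3

    # Explicit service-account key (hypothetical path).
    client = monitoring_v3.AlertPolicyServiceClient.from_service_account_file("sa-key.json")

    # Or rely on Application Default Credentials discovered from the environment.
    client = monitoring_v3.AlertPolicyServiceClient()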
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_alert_policies: gapic_v1.method.wrap_method( - self.list_alert_policies, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.get_alert_policy: gapic_v1.method.wrap_method( - self.get_alert_policy, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.create_alert_policy: gapic_v1.method.wrap_method( - self.create_alert_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.delete_alert_policy: gapic_v1.method.wrap_method( - self.delete_alert_policy, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.update_alert_policy: gapic_v1.method.wrap_method( - self.update_alert_policy, - default_timeout=30.0, - client_info=client_info, - ), - } - - @property - def list_alert_policies(self) -> Callable[ - [alert_service.ListAlertPoliciesRequest], - Union[ - alert_service.ListAlertPoliciesResponse, - Awaitable[alert_service.ListAlertPoliciesResponse] - ]]: - raise NotImplementedError() - - @property - def get_alert_policy(self) -> Callable[ - [alert_service.GetAlertPolicyRequest], - Union[ - alert.AlertPolicy, - Awaitable[alert.AlertPolicy] - ]]: - raise NotImplementedError() - - @property - def create_alert_policy(self) -> Callable[ - [alert_service.CreateAlertPolicyRequest], - Union[ - 
alert.AlertPolicy, - Awaitable[alert.AlertPolicy] - ]]: - raise NotImplementedError() - - @property - def delete_alert_policy(self) -> Callable[ - [alert_service.DeleteAlertPolicyRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_alert_policy(self) -> Callable[ - [alert_service.UpdateAlertPolicyRequest], - Union[ - alert.AlertPolicy, - Awaitable[alert.AlertPolicy] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'AlertPolicyServiceTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py deleted file mode 100644 index 3d144ee4..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py +++ /dev/null @@ -1,370 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.monitoring_v3.types import alert -from google.cloud.monitoring_v3.types import alert_service -from google.protobuf import empty_pb2 # type: ignore -from .base import AlertPolicyServiceTransport, DEFAULT_CLIENT_INFO - - -class AlertPolicyServiceGrpcTransport(AlertPolicyServiceTransport): - """gRPC backend transport for AlertPolicyService. - - The AlertPolicyService API is used to manage (list, create, delete, - edit) alert policies in Stackdriver Monitoring. An alerting policy - is a description of the conditions under which some aspect of your - system is considered to be "unhealthy" and the ways to notify people - or services about this state. In addition to using this API, alert - policies can also be managed through `Stackdriver - Monitoring `__, which can - be reached by clicking the "Monitoring" tab in `Cloud - Console `__. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
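As a minimal sketch (the project and policy IDs are placeholders), this gRPC transport is the default for the synchronous client, so requesting it by name is equivalent to the default behaviour::

    from google.cloud import monitoring_v3

    client = monitoring_v3.AlertPolicyServiceClient(transport="grpc")
    policy = client.get_alert_policy(
        name="projects/my-project/alertPolicies/1234"  # placeholder resource name
    )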
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. 
- credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
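As a sketch of the channel handling above, a caller can build a channel with ``create_channel`` and hand it to the transport; the transport then reuses that channel, and the credentials already bound to it, rather than creating its own::

    from google.cloud import monitoring_v3
    from google.cloud.monitoring_v3.services.alert_policy_service.transports import (
        AlertPolicyServiceGrpcTransport,
    )

    channel = AlertPolicyServiceGrpcTransport.create_channel()  # default host and credentials
    transport = AlertPolicyServiceGrpcTransport(channel=channel)
    client = monitoring_v3.AlertPolicyServiceClient(transport=transport)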
- """ - return self._grpc_channel - - @property - def list_alert_policies(self) -> Callable[ - [alert_service.ListAlertPoliciesRequest], - alert_service.ListAlertPoliciesResponse]: - r"""Return a callable for the list alert policies method over gRPC. - - Lists the existing alerting policies for the - workspace. - - Returns: - Callable[[~.ListAlertPoliciesRequest], - ~.ListAlertPoliciesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_alert_policies' not in self._stubs: - self._stubs['list_alert_policies'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/ListAlertPolicies', - request_serializer=alert_service.ListAlertPoliciesRequest.serialize, - response_deserializer=alert_service.ListAlertPoliciesResponse.deserialize, - ) - return self._stubs['list_alert_policies'] - - @property - def get_alert_policy(self) -> Callable[ - [alert_service.GetAlertPolicyRequest], - alert.AlertPolicy]: - r"""Return a callable for the get alert policy method over gRPC. - - Gets a single alerting policy. - - Returns: - Callable[[~.GetAlertPolicyRequest], - ~.AlertPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_alert_policy' not in self._stubs: - self._stubs['get_alert_policy'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/GetAlertPolicy', - request_serializer=alert_service.GetAlertPolicyRequest.serialize, - response_deserializer=alert.AlertPolicy.deserialize, - ) - return self._stubs['get_alert_policy'] - - @property - def create_alert_policy(self) -> Callable[ - [alert_service.CreateAlertPolicyRequest], - alert.AlertPolicy]: - r"""Return a callable for the create alert policy method over gRPC. - - Creates a new alerting policy. - - Returns: - Callable[[~.CreateAlertPolicyRequest], - ~.AlertPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_alert_policy' not in self._stubs: - self._stubs['create_alert_policy'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/CreateAlertPolicy', - request_serializer=alert_service.CreateAlertPolicyRequest.serialize, - response_deserializer=alert.AlertPolicy.deserialize, - ) - return self._stubs['create_alert_policy'] - - @property - def delete_alert_policy(self) -> Callable[ - [alert_service.DeleteAlertPolicyRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete alert policy method over gRPC. - - Deletes an alerting policy. - - Returns: - Callable[[~.DeleteAlertPolicyRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_alert_policy' not in self._stubs: - self._stubs['delete_alert_policy'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/DeleteAlertPolicy', - request_serializer=alert_service.DeleteAlertPolicyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_alert_policy'] - - @property - def update_alert_policy(self) -> Callable[ - [alert_service.UpdateAlertPolicyRequest], - alert.AlertPolicy]: - r"""Return a callable for the update alert policy method over gRPC. - - Updates an alerting policy. You can either replace the entire - policy with a new one or replace only certain fields in the - current alerting policy by specifying the fields to be updated - via ``updateMask``. Returns the updated alerting policy. - - Returns: - Callable[[~.UpdateAlertPolicyRequest], - ~.AlertPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_alert_policy' not in self._stubs: - self._stubs['update_alert_policy'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/UpdateAlertPolicy', - request_serializer=alert_service.UpdateAlertPolicyRequest.serialize, - response_deserializer=alert.AlertPolicy.deserialize, - ) - return self._stubs['update_alert_policy'] - - -__all__ = ( - 'AlertPolicyServiceGrpcTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py deleted file mode 100644 index 4d0c9160..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,374 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.monitoring_v3.types import alert -from google.cloud.monitoring_v3.types import alert_service -from google.protobuf import empty_pb2 # type: ignore -from .base import AlertPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import AlertPolicyServiceGrpcTransport - - -class AlertPolicyServiceGrpcAsyncIOTransport(AlertPolicyServiceTransport): - """gRPC AsyncIO backend transport for AlertPolicyService. 
- - The AlertPolicyService API is used to manage (list, create, delete, - edit) alert policies in Stackdriver Monitoring. An alerting policy - is a description of the conditions under which some aspect of your - system is considered to be "unhealthy" and the ways to notify people - or services about this state. In addition to using this API, alert - policies can also be managed through `Stackdriver - Monitoring `__, which can - be reached by clicking the "Monitoring" tab in `Cloud - Console `__. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_alert_policies(self) -> Callable[ - [alert_service.ListAlertPoliciesRequest], - Awaitable[alert_service.ListAlertPoliciesResponse]]: - r"""Return a callable for the list alert policies method over gRPC. - - Lists the existing alerting policies for the - workspace. - - Returns: - Callable[[~.ListAlertPoliciesRequest], - Awaitable[~.ListAlertPoliciesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_alert_policies' not in self._stubs: - self._stubs['list_alert_policies'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/ListAlertPolicies', - request_serializer=alert_service.ListAlertPoliciesRequest.serialize, - response_deserializer=alert_service.ListAlertPoliciesResponse.deserialize, - ) - return self._stubs['list_alert_policies'] - - @property - def get_alert_policy(self) -> Callable[ - [alert_service.GetAlertPolicyRequest], - Awaitable[alert.AlertPolicy]]: - r"""Return a callable for the get alert policy method over gRPC. - - Gets a single alerting policy. - - Returns: - Callable[[~.GetAlertPolicyRequest], - Awaitable[~.AlertPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_alert_policy' not in self._stubs: - self._stubs['get_alert_policy'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/GetAlertPolicy', - request_serializer=alert_service.GetAlertPolicyRequest.serialize, - response_deserializer=alert.AlertPolicy.deserialize, - ) - return self._stubs['get_alert_policy'] - - @property - def create_alert_policy(self) -> Callable[ - [alert_service.CreateAlertPolicyRequest], - Awaitable[alert.AlertPolicy]]: - r"""Return a callable for the create alert policy method over gRPC. - - Creates a new alerting policy. - - Returns: - Callable[[~.CreateAlertPolicyRequest], - Awaitable[~.AlertPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_alert_policy' not in self._stubs: - self._stubs['create_alert_policy'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/CreateAlertPolicy', - request_serializer=alert_service.CreateAlertPolicyRequest.serialize, - response_deserializer=alert.AlertPolicy.deserialize, - ) - return self._stubs['create_alert_policy'] - - @property - def delete_alert_policy(self) -> Callable[ - [alert_service.DeleteAlertPolicyRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete alert policy method over gRPC. - - Deletes an alerting policy. - - Returns: - Callable[[~.DeleteAlertPolicyRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_alert_policy' not in self._stubs: - self._stubs['delete_alert_policy'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/DeleteAlertPolicy', - request_serializer=alert_service.DeleteAlertPolicyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_alert_policy'] - - @property - def update_alert_policy(self) -> Callable[ - [alert_service.UpdateAlertPolicyRequest], - Awaitable[alert.AlertPolicy]]: - r"""Return a callable for the update alert policy method over gRPC. - - Updates an alerting policy. You can either replace the entire - policy with a new one or replace only certain fields in the - current alerting policy by specifying the fields to be updated - via ``updateMask``. Returns the updated alerting policy. - - Returns: - Callable[[~.UpdateAlertPolicyRequest], - Awaitable[~.AlertPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_alert_policy' not in self._stubs: - self._stubs['update_alert_policy'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.AlertPolicyService/UpdateAlertPolicy', - request_serializer=alert_service.UpdateAlertPolicyRequest.serialize, - response_deserializer=alert.AlertPolicy.deserialize, - ) - return self._stubs['update_alert_policy'] - - -__all__ = ( - 'AlertPolicyServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/__init__.py deleted file mode 100644 index a59c4473..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import GroupServiceClient -from .async_client import GroupServiceAsyncClient - -__all__ = ( - 'GroupServiceClient', - 'GroupServiceAsyncClient', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/async_client.py deleted file mode 100644 index 09a3e390..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/async_client.py +++ /dev/null @@ -1,793 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
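As a short sketch (the project and policy IDs are placeholders), the AsyncIO transport above is what backs ``AlertPolicyServiceAsyncClient``; its awaitable surface mirrors the synchronous client::

    import asyncio

    from google.cloud import monitoring_v3

    async def main():
        client = monitoring_v3.AlertPolicyServiceAsyncClient()
        policy = await client.get_alert_policy(
            name="projects/my-project/alertPolicies/1234"  # placeholder resource name
        )
        print(policy.display_name)

    asyncio.run(main())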
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.services.group_service import pagers -from google.cloud.monitoring_v3.types import group -from google.cloud.monitoring_v3.types import group as gm_group -from google.cloud.monitoring_v3.types import group_service -from .transports.base import GroupServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import GroupServiceGrpcAsyncIOTransport -from .client import GroupServiceClient - - -class GroupServiceAsyncClient: - """The Group API lets you inspect and manage your - `groups <#google.monitoring.v3.Group>`__. - - A group is a named filter that is used to identify a collection of - monitored resources. Groups are typically used to mirror the - physical and/or logical topology of the environment. Because group - membership is computed dynamically, monitored resources that are - started in the future are automatically placed in matching groups. - By using a group to name monitored resources in, for example, an - alert policy, the target of that alert policy is updated - automatically as monitored resources are added and removed from the - infrastructure. - """ - - _client: GroupServiceClient - - DEFAULT_ENDPOINT = GroupServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = GroupServiceClient.DEFAULT_MTLS_ENDPOINT - - group_path = staticmethod(GroupServiceClient.group_path) - parse_group_path = staticmethod(GroupServiceClient.parse_group_path) - common_billing_account_path = staticmethod(GroupServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(GroupServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(GroupServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(GroupServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(GroupServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(GroupServiceClient.parse_common_organization_path) - common_project_path = staticmethod(GroupServiceClient.common_project_path) - parse_common_project_path = staticmethod(GroupServiceClient.parse_common_project_path) - common_location_path = staticmethod(GroupServiceClient.common_location_path) - parse_common_location_path = staticmethod(GroupServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - GroupServiceAsyncClient: The constructed client. 
- """ - return GroupServiceClient.from_service_account_info.__func__(GroupServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - GroupServiceAsyncClient: The constructed client. - """ - return GroupServiceClient.from_service_account_file.__func__(GroupServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> GroupServiceTransport: - """Returns the transport used by the client instance. - - Returns: - GroupServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(GroupServiceClient).get_transport_class, type(GroupServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, GroupServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the group service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.GroupServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = GroupServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_groups(self, - request: group_service.ListGroupsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListGroupsAsyncPager: - r"""Lists the existing groups. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListGroupsRequest`): - The request object. The `ListGroup` request. 
- name (:class:`str`): - Required. The - `project `__ - whose groups are to be listed. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.group_service.pagers.ListGroupsAsyncPager: - The ListGroups response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = group_service.ListGroupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_groups, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListGroupsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_group(self, - request: group_service.GetGroupRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> group.Group: - r"""Gets a single group. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetGroupRequest`): - The request object. The `GetGroup` request. - name (:class:`str`): - Required. The group to retrieve. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Group: - The description of a dynamic collection of monitored resources. Each group - has a filter that is matched against monitored - resources and their associated metadata. 
If a group's - filter matches an available monitored resource, then - that resource is a member of that group. Groups can - contain any number of monitored resources, and each - monitored resource can be a member of any number of - groups. - - Groups can be nested in parent-child hierarchies. The - parentName field identifies an optional parent for - each group. If a group has a parent, then the only - monitored resources available to be matched by the - group's filter are the resources contained in the - parent group. In other words, a group contains the - monitored resources that match its filter and the - filters of all the group's ancestors. A group without - a parent can contain any monitored resource. - - For example, consider an infrastructure running a set - of instances with two user-defined tags: - "environment" and "role". A parent group has a - filter, environment="production". A child of that - parent group has a filter, role="transcoder". The - parent group contains all instances in the production - environment, regardless of their roles. The child - group contains instances that have the transcoder - role *and* are in the production environment. - - The monitored resources contained in a group can - change at any moment, depending on what resources - exist and what filters are associated with the group - and its ancestors. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = group_service.GetGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_group, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_group(self, - request: group_service.CreateGroupRequest = None, - *, - name: str = None, - group: gm_group.Group = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gm_group.Group: - r"""Creates a new group. - - Args: - request (:class:`google.cloud.monitoring_v3.types.CreateGroupRequest`): - The request object. The `CreateGroup` request. - name (:class:`str`): - Required. The - `project `__ - in which to create the group. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - group (:class:`google.cloud.monitoring_v3.types.Group`): - Required. A group definition. 
It is an error to define - the ``name`` field because the system assigns the name. - - This corresponds to the ``group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Group: - The description of a dynamic collection of monitored resources. Each group - has a filter that is matched against monitored - resources and their associated metadata. If a group's - filter matches an available monitored resource, then - that resource is a member of that group. Groups can - contain any number of monitored resources, and each - monitored resource can be a member of any number of - groups. - - Groups can be nested in parent-child hierarchies. The - parentName field identifies an optional parent for - each group. If a group has a parent, then the only - monitored resources available to be matched by the - group's filter are the resources contained in the - parent group. In other words, a group contains the - monitored resources that match its filter and the - filters of all the group's ancestors. A group without - a parent can contain any monitored resource. - - For example, consider an infrastructure running a set - of instances with two user-defined tags: - "environment" and "role". A parent group has a - filter, environment="production". A child of that - parent group has a filter, role="transcoder". The - parent group contains all instances in the production - environment, regardless of their roles. The child - group contains instances that have the transcoder - role *and* are in the production environment. - - The monitored resources contained in a group can - change at any moment, depending on what resources - exist and what filters are associated with the group - and its ancestors. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, group]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = group_service.CreateGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if group is not None: - request.group = group - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_group, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
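A hedged sketch of the flattened ``create_group`` call documented above; it assumes an existing ``GroupServiceAsyncClient`` inside an ``async def``, and the project, display name, and filter value are illustrative placeholders::

    from google.cloud import monitoring_v3

    # Inside an async def, with client = monitoring_v3.GroupServiceAsyncClient()
    group = monitoring_v3.Group(
        display_name="Production GCE instances",     # example values only
        filter='resource.type = "gce_instance"',     # illustrative filter
    )
    # Leave ``group.name`` unset; the service assigns the name on creation.
    created = await client.create_group(
        name="projects/my-project",
        group=group,
    )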
- return response - - async def update_group(self, - request: group_service.UpdateGroupRequest = None, - *, - group: gm_group.Group = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gm_group.Group: - r"""Updates an existing group. You can change any group attributes - except ``name``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.UpdateGroupRequest`): - The request object. The `UpdateGroup` request. - group (:class:`google.cloud.monitoring_v3.types.Group`): - Required. The new definition of the group. All fields of - the existing group, excepting ``name``, are replaced - with the corresponding fields of this group. - - This corresponds to the ``group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Group: - The description of a dynamic collection of monitored resources. Each group - has a filter that is matched against monitored - resources and their associated metadata. If a group's - filter matches an available monitored resource, then - that resource is a member of that group. Groups can - contain any number of monitored resources, and each - monitored resource can be a member of any number of - groups. - - Groups can be nested in parent-child hierarchies. The - parentName field identifies an optional parent for - each group. If a group has a parent, then the only - monitored resources available to be matched by the - group's filter are the resources contained in the - parent group. In other words, a group contains the - monitored resources that match its filter and the - filters of all the group's ancestors. A group without - a parent can contain any monitored resource. - - For example, consider an infrastructure running a set - of instances with two user-defined tags: - "environment" and "role". A parent group has a - filter, environment="production". A child of that - parent group has a filter, role="transcoder". The - parent group contains all instances in the production - environment, regardless of their roles. The child - group contains instances that have the transcoder - role *and* are in the production environment. - - The monitored resources contained in a group can - change at any moment, depending on what resources - exist and what filters are associated with the group - and its ancestors. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([group]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = group_service.UpdateGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if group is not None: - request.group = group - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_group, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=180.0, - ), - default_timeout=180.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("group.name", request.group.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_group(self, - request: group_service.DeleteGroupRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an existing group. - - Args: - request (:class:`google.cloud.monitoring_v3.types.DeleteGroupRequest`): - The request object. The `DeleteGroup` request. The - default behavior is to be able to delete a single group - without any descendants. - name (:class:`str`): - Required. The group to delete. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = group_service.DeleteGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_group, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_group_members(self, - request: group_service.ListGroupMembersRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListGroupMembersAsyncPager: - r"""Lists the monitored resources that are members of a - group. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListGroupMembersRequest`): - The request object. The `ListGroupMembers` request. - name (:class:`str`): - Required. 
The group whose members are listed. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.group_service.pagers.ListGroupMembersAsyncPager: - The ListGroupMembers response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = group_service.ListGroupMembersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_group_members, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListGroupMembersAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "GroupServiceAsyncClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/client.py deleted file mode 100644 index 07e6f6f0..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/client.py +++ /dev/null @@ -1,954 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
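Tying the remaining async calls of this client together, a small end-to-end sketch; the project and group names are placeholders, and the printed fields come from the ``MonitoredResource`` protobuf yielded by the members pager::

    import asyncio
    from google.cloud import monitoring_v3

    async def main():
        client = monitoring_v3.GroupServiceAsyncClient()
        group_name = "projects/my-project/groups/my-group"  # placeholder

        members = await client.list_group_members(name=group_name)
        async for resource in members:
            print(resource.type, dict(resource.labels))

        # delete_group returns nothing; it raises on failure.
        await client.delete_group(name=group_name)

    asyncio.run(main())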
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.services.group_service import pagers -from google.cloud.monitoring_v3.types import group -from google.cloud.monitoring_v3.types import group as gm_group -from google.cloud.monitoring_v3.types import group_service -from .transports.base import GroupServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import GroupServiceGrpcTransport -from .transports.grpc_asyncio import GroupServiceGrpcAsyncIOTransport - - -class GroupServiceClientMeta(type): - """Metaclass for the GroupService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[GroupServiceTransport]] - _transport_registry["grpc"] = GroupServiceGrpcTransport - _transport_registry["grpc_asyncio"] = GroupServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[GroupServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class GroupServiceClient(metaclass=GroupServiceClientMeta): - """The Group API lets you inspect and manage your - `groups <#google.monitoring.v3.Group>`__. - - A group is a named filter that is used to identify a collection of - monitored resources. Groups are typically used to mirror the - physical and/or logical topology of the environment. Because group - membership is computed dynamically, monitored resources that are - started in the future are automatically placed in matching groups. - By using a group to name monitored resources in, for example, an - alert policy, the target of that alert policy is updated - automatically as monitored resources are added and removed from the - infrastructure. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
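The metaclass above exposes ``get_transport_class`` directly on the client class; a quick sketch of the lookup behaviour, with the results shown in comments being indicative only::

    from google.cloud import monitoring_v3

    # An explicit label selects a specific transport class.
    monitoring_v3.GroupServiceClient.get_transport_class("grpc")
    # -> GroupServiceGrpcTransport

    # With no label, the first registered transport ("grpc") is returned.
    monitoring_v3.GroupServiceClient.get_transport_class()
    # -> GroupServiceGrpcTransport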
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "monitoring.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - GroupServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - GroupServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> GroupServiceTransport: - """Returns the transport used by the client instance. - - Returns: - GroupServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def group_path(project: str,group: str,) -> str: - """Returns a fully-qualified group string.""" - return "projects/{project}/groups/{group}".format(project=project, group=group, ) - - @staticmethod - def parse_group_path(path: str) -> Dict[str,str]: - """Parses a group path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/groups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, GroupServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the group service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, GroupServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. 
- (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, GroupServiceTransport): - # transport is a GroupServiceTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def list_groups(self, - request: group_service.ListGroupsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListGroupsPager: - r"""Lists the existing groups. - - Args: - request (google.cloud.monitoring_v3.types.ListGroupsRequest): - The request object. The `ListGroup` request. - name (str): - Required. The - `project `__ - whose groups are to be listed. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.group_service.pagers.ListGroupsPager: - The ListGroups response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a group_service.ListGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, group_service.ListGroupsRequest): - request = group_service.ListGroupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_groups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListGroupsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
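The endpoint-resolution logic above can be exercised from user code through ``ClientOptions`` and the two environment variables it mentions; a hedged sketch with placeholder values::

    from google.api_core.client_options import ClientOptions
    from google.cloud import monitoring_v3

    # An explicit api_endpoint takes precedence over
    # GOOGLE_API_USE_MTLS_ENDPOINT ("always" / "never" / "auto").
    options = ClientOptions(api_endpoint="monitoring.googleapis.com")
    client = monitoring_v3.GroupServiceClient(client_options=options)

    for group in client.list_groups(name="projects/my-project"):
        print(group.name)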
- return response - - def get_group(self, - request: group_service.GetGroupRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> group.Group: - r"""Gets a single group. - - Args: - request (google.cloud.monitoring_v3.types.GetGroupRequest): - The request object. The `GetGroup` request. - name (str): - Required. The group to retrieve. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Group: - The description of a dynamic collection of monitored resources. Each group - has a filter that is matched against monitored - resources and their associated metadata. If a group's - filter matches an available monitored resource, then - that resource is a member of that group. Groups can - contain any number of monitored resources, and each - monitored resource can be a member of any number of - groups. - - Groups can be nested in parent-child hierarchies. The - parentName field identifies an optional parent for - each group. If a group has a parent, then the only - monitored resources available to be matched by the - group's filter are the resources contained in the - parent group. In other words, a group contains the - monitored resources that match its filter and the - filters of all the group's ancestors. A group without - a parent can contain any monitored resource. - - For example, consider an infrastructure running a set - of instances with two user-defined tags: - "environment" and "role". A parent group has a - filter, environment="production". A child of that - parent group has a filter, role="transcoder". The - parent group contains all instances in the production - environment, regardless of their roles. The child - group contains instances that have the transcoder - role *and* are in the production environment. - - The monitored resources contained in a group can - change at any moment, depending on what resources - exist and what filters are associated with the group - and its ancestors. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a group_service.GetGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, group_service.GetGroupRequest): - request = group_service.GetGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_group(self, - request: group_service.CreateGroupRequest = None, - *, - name: str = None, - group: gm_group.Group = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gm_group.Group: - r"""Creates a new group. - - Args: - request (google.cloud.monitoring_v3.types.CreateGroupRequest): - The request object. The `CreateGroup` request. - name (str): - Required. The - `project `__ - in which to create the group. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - group (google.cloud.monitoring_v3.types.Group): - Required. A group definition. It is an error to define - the ``name`` field because the system assigns the name. - - This corresponds to the ``group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Group: - The description of a dynamic collection of monitored resources. Each group - has a filter that is matched against monitored - resources and their associated metadata. If a group's - filter matches an available monitored resource, then - that resource is a member of that group. Groups can - contain any number of monitored resources, and each - monitored resource can be a member of any number of - groups. - - Groups can be nested in parent-child hierarchies. The - parentName field identifies an optional parent for - each group. If a group has a parent, then the only - monitored resources available to be matched by the - group's filter are the resources contained in the - parent group. In other words, a group contains the - monitored resources that match its filter and the - filters of all the group's ancestors. A group without - a parent can contain any monitored resource. - - For example, consider an infrastructure running a set - of instances with two user-defined tags: - "environment" and "role". A parent group has a - filter, environment="production". A child of that - parent group has a filter, role="transcoder". The - parent group contains all instances in the production - environment, regardless of their roles. The child - group contains instances that have the transcoder - role *and* are in the production environment. - - The monitored resources contained in a group can - change at any moment, depending on what resources - exist and what filters are associated with the group - and its ancestors. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, group]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a group_service.CreateGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, group_service.CreateGroupRequest): - request = group_service.CreateGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if group is not None: - request.group = group - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_group(self, - request: group_service.UpdateGroupRequest = None, - *, - group: gm_group.Group = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gm_group.Group: - r"""Updates an existing group. You can change any group attributes - except ``name``. - - Args: - request (google.cloud.monitoring_v3.types.UpdateGroupRequest): - The request object. The `UpdateGroup` request. - group (google.cloud.monitoring_v3.types.Group): - Required. The new definition of the group. All fields of - the existing group, excepting ``name``, are replaced - with the corresponding fields of this group. - - This corresponds to the ``group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Group: - The description of a dynamic collection of monitored resources. Each group - has a filter that is matched against monitored - resources and their associated metadata. If a group's - filter matches an available monitored resource, then - that resource is a member of that group. Groups can - contain any number of monitored resources, and each - monitored resource can be a member of any number of - groups. - - Groups can be nested in parent-child hierarchies. The - parentName field identifies an optional parent for - each group. If a group has a parent, then the only - monitored resources available to be matched by the - group's filter are the resources contained in the - parent group. In other words, a group contains the - monitored resources that match its filter and the - filters of all the group's ancestors. A group without - a parent can contain any monitored resource. - - For example, consider an infrastructure running a set - of instances with two user-defined tags: - "environment" and "role". A parent group has a - filter, environment="production". A child of that - parent group has a filter, role="transcoder". 
The - parent group contains all instances in the production - environment, regardless of their roles. The child - group contains instances that have the transcoder - role *and* are in the production environment. - - The monitored resources contained in a group can - change at any moment, depending on what resources - exist and what filters are associated with the group - and its ancestors. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([group]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a group_service.UpdateGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, group_service.UpdateGroupRequest): - request = group_service.UpdateGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if group is not None: - request.group = group - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("group.name", request.group.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_group(self, - request: group_service.DeleteGroupRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an existing group. - - Args: - request (google.cloud.monitoring_v3.types.DeleteGroupRequest): - The request object. The `DeleteGroup` request. The - default behavior is to be able to delete a single group - without any descendants. - name (str): - Required. The group to delete. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a group_service.DeleteGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, group_service.DeleteGroupRequest): - request = group_service.DeleteGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_group_members(self, - request: group_service.ListGroupMembersRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListGroupMembersPager: - r"""Lists the monitored resources that are members of a - group. - - Args: - request (google.cloud.monitoring_v3.types.ListGroupMembersRequest): - The request object. The `ListGroupMembers` request. - name (str): - Required. The group whose members are listed. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.group_service.pagers.ListGroupMembersPager: - The ListGroupMembers response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a group_service.ListGroupMembersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, group_service.ListGroupMembersRequest): - request = group_service.ListGroupMembersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_group_members] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListGroupMembersPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "GroupServiceClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/pagers.py deleted file mode 100644 index f6a50de1..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/pagers.py +++ /dev/null @@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.types import group -from google.cloud.monitoring_v3.types import group_service - - -class ListGroupsPager: - """A pager for iterating through ``list_groups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListGroupsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``group`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListGroups`` requests and continue to iterate - through the ``group`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., group_service.ListGroupsResponse], - request: group_service.ListGroupsRequest, - response: group_service.ListGroupsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListGroupsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListGroupsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = group_service.ListGroupsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[group_service.ListGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[group.Group]: - for page in self.pages: - yield from page.group - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListGroupsAsyncPager: - """A pager for iterating through ``list_groups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListGroupsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``group`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListGroups`` requests and continue to iterate - through the ``group`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[group_service.ListGroupsResponse]], - request: group_service.ListGroupsRequest, - response: group_service.ListGroupsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListGroupsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListGroupsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = group_service.ListGroupsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[group_service.ListGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[group.Group]: - async def async_generator(): - async for page in self.pages: - for response in page.group: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListGroupMembersPager: - """A pager for iterating through ``list_group_members`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListGroupMembersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``members`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListGroupMembers`` requests and continue to iterate - through the ``members`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.monitoring_v3.types.ListGroupMembersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., group_service.ListGroupMembersResponse], - request: group_service.ListGroupMembersRequest, - response: group_service.ListGroupMembersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListGroupMembersRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListGroupMembersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = group_service.ListGroupMembersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[group_service.ListGroupMembersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResource]: - for page in self.pages: - yield from page.members - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListGroupMembersAsyncPager: - """A pager for iterating through ``list_group_members`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListGroupMembersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``members`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListGroupMembers`` requests and continue to iterate - through the ``members`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListGroupMembersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[group_service.ListGroupMembersResponse]], - request: group_service.ListGroupMembersRequest, - response: group_service.ListGroupMembersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListGroupMembersRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListGroupMembersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = group_service.ListGroupMembersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[group_service.ListGroupMembersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[monitored_resource_pb2.MonitoredResource]: - async def async_generator(): - async for page in self.pages: - for response in page.members: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/__init__.py deleted file mode 100644 index e68392d4..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import GroupServiceTransport -from .grpc import GroupServiceGrpcTransport -from .grpc_asyncio import GroupServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[GroupServiceTransport]] -_transport_registry['grpc'] = GroupServiceGrpcTransport -_transport_registry['grpc_asyncio'] = GroupServiceGrpcAsyncIOTransport - -__all__ = ( - 'GroupServiceTransport', - 'GroupServiceGrpcTransport', - 'GroupServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/base.py deleted file mode 100644 index 8717ba53..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/base.py +++ /dev/null @@ -1,273 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.types import group -from google.cloud.monitoring_v3.types import group as gm_group -from google.cloud.monitoring_v3.types import group_service -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-monitoring', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class GroupServiceTransport(abc.ABC): - """Abstract transport class for GroupService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ) - - DEFAULT_HOST: str = 'monitoring.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_groups: gapic_v1.method.wrap_method( - self.list_groups, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.get_group: gapic_v1.method.wrap_method( - self.get_group, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.create_group: gapic_v1.method.wrap_method( - self.create_group, - default_timeout=30.0, - client_info=client_info, - ), - self.update_group: gapic_v1.method.wrap_method( - self.update_group, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=180.0, - ), - default_timeout=180.0, - client_info=client_info, - ), - self.delete_group: gapic_v1.method.wrap_method( - self.delete_group, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_group_members: gapic_v1.method.wrap_method( - self.list_group_members, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - } - - @property - def list_groups(self) -> Callable[ - [group_service.ListGroupsRequest], - Union[ - group_service.ListGroupsResponse, - 
Awaitable[group_service.ListGroupsResponse] - ]]: - raise NotImplementedError() - - @property - def get_group(self) -> Callable[ - [group_service.GetGroupRequest], - Union[ - group.Group, - Awaitable[group.Group] - ]]: - raise NotImplementedError() - - @property - def create_group(self) -> Callable[ - [group_service.CreateGroupRequest], - Union[ - gm_group.Group, - Awaitable[gm_group.Group] - ]]: - raise NotImplementedError() - - @property - def update_group(self) -> Callable[ - [group_service.UpdateGroupRequest], - Union[ - gm_group.Group, - Awaitable[gm_group.Group] - ]]: - raise NotImplementedError() - - @property - def delete_group(self) -> Callable[ - [group_service.DeleteGroupRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_group_members(self) -> Callable[ - [group_service.ListGroupMembersRequest], - Union[ - group_service.ListGroupMembersResponse, - Awaitable[group_service.ListGroupMembersResponse] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'GroupServiceTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc.py deleted file mode 100644 index 9dadfef7..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc.py +++ /dev/null @@ -1,398 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.monitoring_v3.types import group -from google.cloud.monitoring_v3.types import group as gm_group -from google.cloud.monitoring_v3.types import group_service -from google.protobuf import empty_pb2 # type: ignore -from .base import GroupServiceTransport, DEFAULT_CLIENT_INFO - - -class GroupServiceGrpcTransport(GroupServiceTransport): - """gRPC backend transport for GroupService. - - The Group API lets you inspect and manage your - `groups <#google.monitoring.v3.Group>`__. - - A group is a named filter that is used to identify a collection of - monitored resources. Groups are typically used to mirror the - physical and/or logical topology of the environment. Because group - membership is computed dynamically, monitored resources that are - started in the future are automatically placed in matching groups. - By using a group to name monitored resources in, for example, an - alert policy, the target of that alert policy is updated - automatically as monitored resources are added and removed from the - infrastructure. 
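The retry policy wired up in ``_prep_wrapped_messages`` above can also be overridden per call. A sketch that mirrors the generated defaults (placeholder project ID)::

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import monitoring_v3

    client = monitoring_v3.GroupServiceClient()

    # Equivalent to the default policy: exponential backoff from 0.1s up to
    # 30s, retrying only on ServiceUnavailable, with a 30s overall deadline.
    custom_retry = retries.Retry(
        initial=0.1,
        maximum=30.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=30.0,
    )

    pager = client.list_groups(
        request=monitoring_v3.ListGroupsRequest(name="projects/my-project"),
        retry=custom_retry,
        timeout=30.0,
    )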
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def list_groups(self) -> Callable[ - [group_service.ListGroupsRequest], - group_service.ListGroupsResponse]: - r"""Return a callable for the list groups method over gRPC. - - Lists the existing groups. - - Returns: - Callable[[~.ListGroupsRequest], - ~.ListGroupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_groups' not in self._stubs: - self._stubs['list_groups'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/ListGroups', - request_serializer=group_service.ListGroupsRequest.serialize, - response_deserializer=group_service.ListGroupsResponse.deserialize, - ) - return self._stubs['list_groups'] - - @property - def get_group(self) -> Callable[ - [group_service.GetGroupRequest], - group.Group]: - r"""Return a callable for the get group method over gRPC. - - Gets a single group. - - Returns: - Callable[[~.GetGroupRequest], - ~.Group]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_group' not in self._stubs: - self._stubs['get_group'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/GetGroup', - request_serializer=group_service.GetGroupRequest.serialize, - response_deserializer=group.Group.deserialize, - ) - return self._stubs['get_group'] - - @property - def create_group(self) -> Callable[ - [group_service.CreateGroupRequest], - gm_group.Group]: - r"""Return a callable for the create group method over gRPC. - - Creates a new group. - - Returns: - Callable[[~.CreateGroupRequest], - ~.Group]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_group' not in self._stubs: - self._stubs['create_group'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/CreateGroup', - request_serializer=group_service.CreateGroupRequest.serialize, - response_deserializer=gm_group.Group.deserialize, - ) - return self._stubs['create_group'] - - @property - def update_group(self) -> Callable[ - [group_service.UpdateGroupRequest], - gm_group.Group]: - r"""Return a callable for the update group method over gRPC. - - Updates an existing group. You can change any group attributes - except ``name``. - - Returns: - Callable[[~.UpdateGroupRequest], - ~.Group]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_group' not in self._stubs: - self._stubs['update_group'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/UpdateGroup', - request_serializer=group_service.UpdateGroupRequest.serialize, - response_deserializer=gm_group.Group.deserialize, - ) - return self._stubs['update_group'] - - @property - def delete_group(self) -> Callable[ - [group_service.DeleteGroupRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete group method over gRPC. - - Deletes an existing group. - - Returns: - Callable[[~.DeleteGroupRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_group' not in self._stubs: - self._stubs['delete_group'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/DeleteGroup', - request_serializer=group_service.DeleteGroupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_group'] - - @property - def list_group_members(self) -> Callable[ - [group_service.ListGroupMembersRequest], - group_service.ListGroupMembersResponse]: - r"""Return a callable for the list group members method over gRPC. - - Lists the monitored resources that are members of a - group. - - Returns: - Callable[[~.ListGroupMembersRequest], - ~.ListGroupMembersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_group_members' not in self._stubs: - self._stubs['list_group_members'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/ListGroupMembers', - request_serializer=group_service.ListGroupMembersRequest.serialize, - response_deserializer=group_service.ListGroupMembersResponse.deserialize, - ) - return self._stubs['list_group_members'] - - -__all__ = ( - 'GroupServiceGrpcTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py deleted file mode 100644 index cf4b535e..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,402 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.monitoring_v3.types import group -from google.cloud.monitoring_v3.types import group as gm_group -from google.cloud.monitoring_v3.types import group_service -from google.protobuf import empty_pb2 # type: ignore -from .base import GroupServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import GroupServiceGrpcTransport - - -class GroupServiceGrpcAsyncIOTransport(GroupServiceTransport): - """gRPC AsyncIO backend transport for GroupService. - - The Group API lets you inspect and manage your - `groups <#google.monitoring.v3.Group>`__. - - A group is a named filter that is used to identify a collection of - monitored resources. Groups are typically used to mirror the - physical and/or logical topology of the environment. Because group - membership is computed dynamically, monitored resources that are - started in the future are automatically placed in matching groups. - By using a group to name monitored resources in, for example, an - alert policy, the target of that alert policy is updated - automatically as monitored resources are added and removed from the - infrastructure. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_groups(self) -> Callable[ - [group_service.ListGroupsRequest], - Awaitable[group_service.ListGroupsResponse]]: - r"""Return a callable for the list groups method over gRPC. - - Lists the existing groups. - - Returns: - Callable[[~.ListGroupsRequest], - Awaitable[~.ListGroupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_groups' not in self._stubs: - self._stubs['list_groups'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/ListGroups', - request_serializer=group_service.ListGroupsRequest.serialize, - response_deserializer=group_service.ListGroupsResponse.deserialize, - ) - return self._stubs['list_groups'] - - @property - def get_group(self) -> Callable[ - [group_service.GetGroupRequest], - Awaitable[group.Group]]: - r"""Return a callable for the get group method over gRPC. - - Gets a single group. - - Returns: - Callable[[~.GetGroupRequest], - Awaitable[~.Group]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_group' not in self._stubs: - self._stubs['get_group'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/GetGroup', - request_serializer=group_service.GetGroupRequest.serialize, - response_deserializer=group.Group.deserialize, - ) - return self._stubs['get_group'] - - @property - def create_group(self) -> Callable[ - [group_service.CreateGroupRequest], - Awaitable[gm_group.Group]]: - r"""Return a callable for the create group method over gRPC. - - Creates a new group. - - Returns: - Callable[[~.CreateGroupRequest], - Awaitable[~.Group]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_group' not in self._stubs: - self._stubs['create_group'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/CreateGroup', - request_serializer=group_service.CreateGroupRequest.serialize, - response_deserializer=gm_group.Group.deserialize, - ) - return self._stubs['create_group'] - - @property - def update_group(self) -> Callable[ - [group_service.UpdateGroupRequest], - Awaitable[gm_group.Group]]: - r"""Return a callable for the update group method over gRPC. - - Updates an existing group. You can change any group attributes - except ``name``. - - Returns: - Callable[[~.UpdateGroupRequest], - Awaitable[~.Group]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_group' not in self._stubs: - self._stubs['update_group'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/UpdateGroup', - request_serializer=group_service.UpdateGroupRequest.serialize, - response_deserializer=gm_group.Group.deserialize, - ) - return self._stubs['update_group'] - - @property - def delete_group(self) -> Callable[ - [group_service.DeleteGroupRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete group method over gRPC. - - Deletes an existing group. - - Returns: - Callable[[~.DeleteGroupRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_group' not in self._stubs: - self._stubs['delete_group'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/DeleteGroup', - request_serializer=group_service.DeleteGroupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_group'] - - @property - def list_group_members(self) -> Callable[ - [group_service.ListGroupMembersRequest], - Awaitable[group_service.ListGroupMembersResponse]]: - r"""Return a callable for the list group members method over gRPC. - - Lists the monitored resources that are members of a - group. 
- - Returns: - Callable[[~.ListGroupMembersRequest], - Awaitable[~.ListGroupMembersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_group_members' not in self._stubs: - self._stubs['list_group_members'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.GroupService/ListGroupMembers', - request_serializer=group_service.ListGroupMembersRequest.serialize, - response_deserializer=group_service.ListGroupMembersResponse.deserialize, - ) - return self._stubs['list_group_members'] - - -__all__ = ( - 'GroupServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/__init__.py deleted file mode 100644 index 836589ba..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import MetricServiceClient -from .async_client import MetricServiceAsyncClient - -__all__ = ( - 'MetricServiceClient', - 'MetricServiceAsyncClient', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/async_client.py deleted file mode 100644 index 6ac8f37b..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/async_client.py +++ /dev/null @@ -1,967 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import label_pb2 # type: ignore -from google.api import launch_stage_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.services.metric_service import pagers -from google.cloud.monitoring_v3.types import common -from google.cloud.monitoring_v3.types import metric as gm_metric -from google.cloud.monitoring_v3.types import metric_service -from .transports.base import MetricServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MetricServiceGrpcAsyncIOTransport -from .client import MetricServiceClient - - -class MetricServiceAsyncClient: - """Manages metric descriptors, monitored resource descriptors, - and time series data. - """ - - _client: MetricServiceClient - - DEFAULT_ENDPOINT = MetricServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MetricServiceClient.DEFAULT_MTLS_ENDPOINT - - metric_descriptor_path = staticmethod(MetricServiceClient.metric_descriptor_path) - parse_metric_descriptor_path = staticmethod(MetricServiceClient.parse_metric_descriptor_path) - monitored_resource_descriptor_path = staticmethod(MetricServiceClient.monitored_resource_descriptor_path) - parse_monitored_resource_descriptor_path = staticmethod(MetricServiceClient.parse_monitored_resource_descriptor_path) - time_series_path = staticmethod(MetricServiceClient.time_series_path) - parse_time_series_path = staticmethod(MetricServiceClient.parse_time_series_path) - common_billing_account_path = staticmethod(MetricServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MetricServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(MetricServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(MetricServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(MetricServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(MetricServiceClient.parse_common_organization_path) - common_project_path = staticmethod(MetricServiceClient.common_project_path) - parse_common_project_path = staticmethod(MetricServiceClient.parse_common_project_path) - common_location_path = staticmethod(MetricServiceClient.common_location_path) - parse_common_location_path = staticmethod(MetricServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricServiceAsyncClient: The constructed client. 
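A sketch of the two service-account constructors referenced above (the key file path is a placeholder)::

    import json

    from google.cloud import monitoring_v3

    # From a key file on disk.
    client = monitoring_v3.MetricServiceAsyncClient.from_service_account_file(
        "service-account-key.json"
    )

    # Or from already-loaded key material.
    with open("service-account-key.json") as fp:
        info = json.load(fp)
    client = monitoring_v3.MetricServiceAsyncClient.from_service_account_info(info)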
- """ - return MetricServiceClient.from_service_account_info.__func__(MetricServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricServiceAsyncClient: The constructed client. - """ - return MetricServiceClient.from_service_account_file.__func__(MetricServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MetricServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MetricServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(MetricServiceClient).get_transport_class, type(MetricServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, MetricServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metric service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.MetricServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = MetricServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_monitored_resource_descriptors(self, - request: metric_service.ListMonitoredResourceDescriptorsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: - r"""Lists monitored resource descriptors that match a - filter. 
This method does not require a Workspace. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest`): - The request object. The - `ListMonitoredResourceDescriptors` request. - name (:class:`str`): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.metric_service.pagers.ListMonitoredResourceDescriptorsAsyncPager: - The ListMonitoredResourceDescriptors response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = metric_service.ListMonitoredResourceDescriptorsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_monitored_resource_descriptors, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListMonitoredResourceDescriptorsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_monitored_resource_descriptor(self, - request: metric_service.GetMonitoredResourceDescriptorRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> monitored_resource_pb2.MonitoredResourceDescriptor: - r"""Gets a single monitored resource descriptor. This - method does not require a Workspace. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetMonitoredResourceDescriptorRequest`): - The request object. The `GetMonitoredResourceDescriptor` - request. - name (:class:`str`): - Required. The monitored resource descriptor to get. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/monitoredResourceDescriptors/[RESOURCE_TYPE] - - The ``[RESOURCE_TYPE]`` is a predefined type, such as - ``cloudsql_database``. 
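A sketch of calling the method above end to end (placeholder project ID)::

    import asyncio

    from google.cloud import monitoring_v3

    async def list_descriptors():
        client = monitoring_v3.MetricServiceAsyncClient()
        pager = await client.list_monitored_resource_descriptors(
            name="projects/my-project"
        )
        # The async pager resolves additional pages transparently.
        async for descriptor in pager:
            print(descriptor.type)

    asyncio.run(list_descriptors())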
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.monitored_resource_pb2.MonitoredResourceDescriptor: - An object that describes the schema of a [MonitoredResource][google.api.MonitoredResource] object using a - type name and a set of labels. For example, the - monitored resource descriptor for Google Compute - Engine VM instances has a type of "gce_instance" and - specifies the use of the labels "instance_id" and - "zone" to identify particular VM instances. - - Different APIs can support different monitored - resource types. APIs generally provide a list method - that returns the monitored resource descriptors used - by the API. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = metric_service.GetMonitoredResourceDescriptorRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_monitored_resource_descriptor, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_metric_descriptors(self, - request: metric_service.ListMetricDescriptorsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMetricDescriptorsAsyncPager: - r"""Lists metric descriptors that match a filter. This - method does not require a Workspace. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest`): - The request object. The `ListMetricDescriptors` request. - name (:class:`str`): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.monitoring_v3.services.metric_service.pagers.ListMetricDescriptorsAsyncPager: - The ListMetricDescriptors response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = metric_service.ListMetricDescriptorsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_metric_descriptors, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListMetricDescriptorsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_metric_descriptor(self, - request: metric_service.GetMetricDescriptorRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metric_pb2.MetricDescriptor: - r"""Gets a single metric descriptor. This method does not - require a Workspace. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetMetricDescriptorRequest`): - The request object. The `GetMetricDescriptor` request. - name (:class:`str`): - Required. The metric descriptor on which to execute the - request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] - - An example value of ``[METRIC_ID]`` is - ``"compute.googleapis.com/instance/disk/read_bytes_count"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.metric_pb2.MetricDescriptor: - Defines a metric type and its schema. - Once a metric descriptor is created, - deleting or altering it stops data - collection and makes the metric type's - existing data unusable. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
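For the metric-descriptor read methods above, a comparable hedged sketch; the metric type is the example value cited in the docstring and the project ID is a placeholder::

    from google.cloud import monitoring_v3

    async def read_metric_descriptors(project_id: str) -> None:
        client = monitoring_v3.MetricServiceAsyncClient()
        # List every descriptor visible to the project; the pager resolves pages lazily.
        async for descriptor in await client.list_metric_descriptors(name=f"projects/{project_id}"):
            print(descriptor.type)
        # Fetch one descriptor by name; the [METRIC_ID] portion is the metric type.
        descriptor = await client.get_metric_descriptor(
            name=f"projects/{project_id}/metricDescriptors/"
                 "compute.googleapis.com/instance/disk/read_bytes_count"
        )
        print(descriptor.metric_kind, descriptor.value_type)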
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = metric_service.GetMetricDescriptorRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_metric_descriptor, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_metric_descriptor(self, - request: metric_service.CreateMetricDescriptorRequest = None, - *, - name: str = None, - metric_descriptor: metric_pb2.MetricDescriptor = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metric_pb2.MetricDescriptor: - r"""Creates a new metric descriptor. User-created metric descriptors - define `custom - metrics `__. - - Args: - request (:class:`google.cloud.monitoring_v3.types.CreateMetricDescriptorRequest`): - The request object. The `CreateMetricDescriptor` - request. - name (:class:`str`): - Required. The - `project `__ - on which to execute the request. The format is: 4 - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metric_descriptor (:class:`google.api.metric_pb2.MetricDescriptor`): - Required. The new `custom - metric `__ - descriptor. - - This corresponds to the ``metric_descriptor`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.metric_pb2.MetricDescriptor: - Defines a metric type and its schema. - Once a metric descriptor is created, - deleting or altering it stops data - collection and makes the metric type's - existing data unusable. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, metric_descriptor]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = metric_service.CreateMetricDescriptorRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if metric_descriptor is not None: - request.metric_descriptor = metric_descriptor - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_metric_descriptor, - default_timeout=12.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_metric_descriptor(self, - request: metric_service.DeleteMetricDescriptorRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a metric descriptor. Only user-created `custom - metrics `__ - can be deleted. - - Args: - request (:class:`google.cloud.monitoring_v3.types.DeleteMetricDescriptorRequest`): - The request object. The `DeleteMetricDescriptor` - request. - name (:class:`str`): - Required. The metric descriptor on which to execute the - request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] - - An example of ``[METRIC_ID]`` is: - ``"custom.googleapis.com/my_test_metric"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = metric_service.DeleteMetricDescriptorRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_metric_descriptor, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
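Tying the create and delete descriptor methods together, a minimal sketch that registers and then removes a user-defined descriptor, assuming the custom metric type ``custom.googleapis.com/my_test_metric`` mentioned in the docstring above::

    from google.api import metric_pb2
    from google.cloud import monitoring_v3

    async def manage_custom_descriptor(project_id: str) -> None:
        client = monitoring_v3.MetricServiceAsyncClient()
        descriptor = metric_pb2.MetricDescriptor(
            type="custom.googleapis.com/my_test_metric",
            metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE,
            value_type=metric_pb2.MetricDescriptor.ValueType.DOUBLE,
            description="Example custom metric (illustrative only).",
        )
        created = await client.create_metric_descriptor(
            name=f"projects/{project_id}", metric_descriptor=descriptor
        )
        # Only user-created (custom) descriptors may be deleted.
        await client.delete_metric_descriptor(name=created.name)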
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_time_series(self, - request: metric_service.ListTimeSeriesRequest = None, - *, - name: str = None, - filter: str = None, - interval: common.TimeInterval = None, - view: metric_service.ListTimeSeriesRequest.TimeSeriesView = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTimeSeriesAsyncPager: - r"""Lists time series that match a filter. This method - does not require a Workspace. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListTimeSeriesRequest`): - The request object. The `ListTimeSeries` request. - name (:class:`str`): - Required. The - `project `__, - organization or folder on which to execute the request. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - organizations/[ORGANIZATION_ID] - folders/[FOLDER_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (:class:`str`): - Required. A `monitoring - filter `__ - that specifies which time series should be returned. The - filter must specify a single metric type, and can - additionally specify metric labels and other - information. For example: - - :: - - metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND - metric.labels.instance_name = "my-instance-name" - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - interval (:class:`google.cloud.monitoring_v3.types.TimeInterval`): - Required. The time interval for which - results should be returned. Only time - series that contain data points in the - specified interval are included in the - response. - - This corresponds to the ``interval`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - view (:class:`google.cloud.monitoring_v3.types.ListTimeSeriesRequest.TimeSeriesView`): - Required. Specifies which information - is returned about the time series. - - This corresponds to the ``view`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.metric_service.pagers.ListTimeSeriesAsyncPager: - The ListTimeSeries response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, filter, interval, view]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = metric_service.ListTimeSeriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if filter is not None: - request.filter = filter - if interval is not None: - request.interval = interval - if view is not None: - request.view = view - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_time_series, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=90.0, - ), - default_timeout=90.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTimeSeriesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_time_series(self, - request: metric_service.CreateTimeSeriesRequest = None, - *, - name: str = None, - time_series: Sequence[gm_metric.TimeSeries] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Creates or adds data to one or more time series. - The response is empty if all time series in the request - were written. If any time series could not be written, a - corresponding failure message is included in the error - response. - - Args: - request (:class:`google.cloud.monitoring_v3.types.CreateTimeSeriesRequest`): - The request object. The `CreateTimeSeries` request. - name (:class:`str`): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - time_series (:class:`Sequence[google.cloud.monitoring_v3.types.TimeSeries]`): - Required. The new data to be added to a list of time - series. Adds at most one data point to each of several - time series. The new data point must be more recent than - any other point in its time series. Each ``TimeSeries`` - value must fully specify a unique time series by - supplying all label values for the metric and the - monitored resource. - - The maximum number of ``TimeSeries`` objects per - ``Create`` request is 200. - - This corresponds to the ``time_series`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
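A hedged sketch of the ``ListTimeSeries`` call documented above, reusing the CPU-usage filter from the docstring; the 20-minute window is an arbitrary illustrative choice::

    import time
    from google.cloud import monitoring_v3

    async def query_cpu_usage(project_id: str) -> None:
        client = monitoring_v3.MetricServiceAsyncClient()
        now = int(time.time())
        interval = monitoring_v3.TimeInterval(
            {"start_time": {"seconds": now - 1200}, "end_time": {"seconds": now}}
        )
        pager = await client.list_time_series(
            name=f"projects/{project_id}",
            filter='metric.type = "compute.googleapis.com/instance/cpu/usage_time"',
            interval=interval,
            view=monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL,
        )
        async for series in pager:
            print(series.metric.type, len(series.points))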
- has_flattened_params = any([name, time_series]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = metric_service.CreateTimeSeriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if time_series: - request.time_series.extend(time_series) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_time_series, - default_timeout=12.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "MetricServiceAsyncClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/client.py deleted file mode 100644 index 07a79178..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/client.py +++ /dev/null @@ -1,1140 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
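Before the synchronous client below, a short sketch of the ``CreateTimeSeries`` write path documented above; the metric type, resource labels, and value are placeholders, and the synchronous client is used for brevity::

    import time
    from google.cloud import monitoring_v3

    def write_data_point(project_id: str) -> None:
        client = monitoring_v3.MetricServiceClient()
        series = monitoring_v3.TimeSeries()
        series.metric.type = "custom.googleapis.com/my_test_metric"  # assumed custom metric
        series.resource.type = "global"
        series.resource.labels["project_id"] = project_id
        point = monitoring_v3.Point(
            {"interval": {"end_time": {"seconds": int(time.time())}},
             "value": {"double_value": 3.14}}
        )
        series.points = [point]
        # At most 200 TimeSeries objects may be sent per request (see the docstring above).
        client.create_time_series(name=f"projects/{project_id}", time_series=[series])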
-# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import label_pb2 # type: ignore -from google.api import launch_stage_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.services.metric_service import pagers -from google.cloud.monitoring_v3.types import common -from google.cloud.monitoring_v3.types import metric as gm_metric -from google.cloud.monitoring_v3.types import metric_service -from .transports.base import MetricServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import MetricServiceGrpcTransport -from .transports.grpc_asyncio import MetricServiceGrpcAsyncIOTransport - - -class MetricServiceClientMeta(type): - """Metaclass for the MetricService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[MetricServiceTransport]] - _transport_registry["grpc"] = MetricServiceGrpcTransport - _transport_registry["grpc_asyncio"] = MetricServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[MetricServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class MetricServiceClient(metaclass=MetricServiceClientMeta): - """Manages metric descriptors, monitored resource descriptors, - and time series data. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "monitoring.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MetricServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MetricServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def metric_descriptor_path(project: str,) -> str: - """Returns a fully-qualified metric_descriptor string.""" - return "projects/{project}/metricDescriptors/{metric_descriptor=**}".format(project=project, ) - - @staticmethod - def parse_metric_descriptor_path(path: str) -> Dict[str,str]: - """Parses a metric_descriptor path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/metricDescriptors/{metric_descriptor=**}$", path) - return m.groupdict() if m else {} - - @staticmethod - def monitored_resource_descriptor_path(project: str,monitored_resource_descriptor: str,) -> str: - """Returns a fully-qualified monitored_resource_descriptor string.""" - return "projects/{project}/monitoredResourceDescriptors/{monitored_resource_descriptor}".format(project=project, monitored_resource_descriptor=monitored_resource_descriptor, ) - - @staticmethod - def parse_monitored_resource_descriptor_path(path: str) -> Dict[str,str]: - """Parses a monitored_resource_descriptor path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/monitoredResourceDescriptors/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def time_series_path(project: str,time_series: str,) -> str: - """Returns a fully-qualified time_series string.""" - return "projects/{project}/timeSeries/{time_series}".format(project=project, time_series=time_series, ) - - @staticmethod - def parse_time_series_path(path: str) -> Dict[str,str]: - """Parses a time_series path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/timeSeries/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return 
"projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetricServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metric service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, MetricServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, MetricServiceTransport): - # transport is a MetricServiceTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def list_monitored_resource_descriptors(self, - request: metric_service.ListMonitoredResourceDescriptorsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsPager: - r"""Lists monitored resource descriptors that match a - filter. This method does not require a Workspace. - - Args: - request (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest): - The request object. The - `ListMonitoredResourceDescriptors` request. - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.metric_service.pagers.ListMonitoredResourceDescriptorsPager: - The ListMonitoredResourceDescriptors response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
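The constructor logic above resolves the endpoint from ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` and ``GOOGLE_API_USE_MTLS_ENDPOINT``; a brief sketch of the override points it documents, with the key file path and project ID as placeholders::

    from google.api_core.client_options import ClientOptions
    from google.cloud import monitoring_v3

    # An explicit api_endpoint always wins over the mTLS environment variables.
    client = monitoring_v3.MetricServiceClient(
        client_options=ClientOptions(api_endpoint="monitoring.googleapis.com")
    )

    # Credentials can also be supplied via the from_service_account_file helper.
    keyed_client = monitoring_v3.MetricServiceClient.from_service_account_file("key.json")

    # The path helper static methods shown earlier build and parse resource names.
    name = monitoring_v3.MetricServiceClient.common_project_path("my-project")
    assert monitoring_v3.MetricServiceClient.parse_common_project_path(name) == {"project": "my-project"}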
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a metric_service.ListMonitoredResourceDescriptorsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metric_service.ListMonitoredResourceDescriptorsRequest): - request = metric_service.ListMonitoredResourceDescriptorsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_monitored_resource_descriptors] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListMonitoredResourceDescriptorsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_monitored_resource_descriptor(self, - request: metric_service.GetMonitoredResourceDescriptorRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> monitored_resource_pb2.MonitoredResourceDescriptor: - r"""Gets a single monitored resource descriptor. This - method does not require a Workspace. - - Args: - request (google.cloud.monitoring_v3.types.GetMonitoredResourceDescriptorRequest): - The request object. The `GetMonitoredResourceDescriptor` - request. - name (str): - Required. The monitored resource descriptor to get. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/monitoredResourceDescriptors/[RESOURCE_TYPE] - - The ``[RESOURCE_TYPE]`` is a predefined type, such as - ``cloudsql_database``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.monitored_resource_pb2.MonitoredResourceDescriptor: - An object that describes the schema of a [MonitoredResource][google.api.MonitoredResource] object using a - type name and a set of labels. For example, the - monitored resource descriptor for Google Compute - Engine VM instances has a type of "gce_instance" and - specifies the use of the labels "instance_id" and - "zone" to identify particular VM instances. - - Different APIs can support different monitored - resource types. APIs generally provide a list method - that returns the monitored resource descriptors used - by the API. - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a metric_service.GetMonitoredResourceDescriptorRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metric_service.GetMonitoredResourceDescriptorRequest): - request = metric_service.GetMonitoredResourceDescriptorRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_monitored_resource_descriptor] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_metric_descriptors(self, - request: metric_service.ListMetricDescriptorsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMetricDescriptorsPager: - r"""Lists metric descriptors that match a filter. This - method does not require a Workspace. - - Args: - request (google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest): - The request object. The `ListMetricDescriptors` request. - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.metric_service.pagers.ListMetricDescriptorsPager: - The ListMetricDescriptors response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a metric_service.ListMetricDescriptorsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metric_service.ListMetricDescriptorsRequest): - request = metric_service.ListMetricDescriptorsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_metric_descriptors] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListMetricDescriptorsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_metric_descriptor(self, - request: metric_service.GetMetricDescriptorRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metric_pb2.MetricDescriptor: - r"""Gets a single metric descriptor. This method does not - require a Workspace. - - Args: - request (google.cloud.monitoring_v3.types.GetMetricDescriptorRequest): - The request object. The `GetMetricDescriptor` request. - name (str): - Required. The metric descriptor on which to execute the - request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] - - An example value of ``[METRIC_ID]`` is - ``"compute.googleapis.com/instance/disk/read_bytes_count"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.metric_pb2.MetricDescriptor: - Defines a metric type and its schema. - Once a metric descriptor is created, - deleting or altering it stops data - collection and makes the metric type's - existing data unusable. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a metric_service.GetMetricDescriptorRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metric_service.GetMetricDescriptorRequest): - request = metric_service.GetMetricDescriptorRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_metric_descriptor] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_metric_descriptor(self, - request: metric_service.CreateMetricDescriptorRequest = None, - *, - name: str = None, - metric_descriptor: metric_pb2.MetricDescriptor = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metric_pb2.MetricDescriptor: - r"""Creates a new metric descriptor. User-created metric descriptors - define `custom - metrics `__. - - Args: - request (google.cloud.monitoring_v3.types.CreateMetricDescriptorRequest): - The request object. The `CreateMetricDescriptor` - request. - name (str): - Required. The - `project `__ - on which to execute the request. The format is: 4 - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metric_descriptor (google.api.metric_pb2.MetricDescriptor): - Required. The new `custom - metric `__ - descriptor. - - This corresponds to the ``metric_descriptor`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.metric_pb2.MetricDescriptor: - Defines a metric type and its schema. - Once a metric descriptor is created, - deleting or altering it stops data - collection and makes the metric type's - existing data unusable. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, metric_descriptor]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a metric_service.CreateMetricDescriptorRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metric_service.CreateMetricDescriptorRequest): - request = metric_service.CreateMetricDescriptorRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if metric_descriptor is not None: - request.metric_descriptor = metric_descriptor - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_metric_descriptor] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
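The sanity check repeated in each method above means a request object and flattened field arguments are mutually exclusive; a small sketch of that behaviour, using a placeholder resource name::

    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    request = monitoring_v3.GetMetricDescriptorRequest(
        name="projects/my-project/metricDescriptors/custom.googleapis.com/my_test_metric"
    )
    try:
        # Mixing a request object with a flattened field is rejected before any RPC is made.
        client.get_metric_descriptor(request=request, name=request.name)
    except ValueError as exc:
        print(exc)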
- return response - - def delete_metric_descriptor(self, - request: metric_service.DeleteMetricDescriptorRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a metric descriptor. Only user-created `custom - metrics `__ - can be deleted. - - Args: - request (google.cloud.monitoring_v3.types.DeleteMetricDescriptorRequest): - The request object. The `DeleteMetricDescriptor` - request. - name (str): - Required. The metric descriptor on which to execute the - request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] - - An example of ``[METRIC_ID]`` is: - ``"custom.googleapis.com/my_test_metric"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a metric_service.DeleteMetricDescriptorRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metric_service.DeleteMetricDescriptorRequest): - request = metric_service.DeleteMetricDescriptorRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_metric_descriptor] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_time_series(self, - request: metric_service.ListTimeSeriesRequest = None, - *, - name: str = None, - filter: str = None, - interval: common.TimeInterval = None, - view: metric_service.ListTimeSeriesRequest.TimeSeriesView = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTimeSeriesPager: - r"""Lists time series that match a filter. This method - does not require a Workspace. - - Args: - request (google.cloud.monitoring_v3.types.ListTimeSeriesRequest): - The request object. The `ListTimeSeries` request. - name (str): - Required. The - `project `__, - organization or folder on which to execute the request. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - organizations/[ORGANIZATION_ID] - folders/[FOLDER_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - Required. 
A `monitoring - filter `__ - that specifies which time series should be returned. The - filter must specify a single metric type, and can - additionally specify metric labels and other - information. For example: - - :: - - metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND - metric.labels.instance_name = "my-instance-name" - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - interval (google.cloud.monitoring_v3.types.TimeInterval): - Required. The time interval for which - results should be returned. Only time - series that contain data points in the - specified interval are included in the - response. - - This corresponds to the ``interval`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - view (google.cloud.monitoring_v3.types.ListTimeSeriesRequest.TimeSeriesView): - Required. Specifies which information - is returned about the time series. - - This corresponds to the ``view`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.metric_service.pagers.ListTimeSeriesPager: - The ListTimeSeries response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, filter, interval, view]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a metric_service.ListTimeSeriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metric_service.ListTimeSeriesRequest): - request = metric_service.ListTimeSeriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if filter is not None: - request.filter = filter - if interval is not None: - request.interval = interval - if view is not None: - request.view = view - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_time_series] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTimeSeriesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
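The synchronous ``list_time_series`` above mirrors the async variant; fields that are not flattened, such as ``aggregation``, can be supplied through a request object instead. A hedged sketch with an arbitrary 20-minute window, 5-minute alignment, and a placeholder project::

    import time
    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    now = int(time.time())
    results = client.list_time_series(
        request={
            "name": "projects/my-project",
            "filter": 'metric.type = "compute.googleapis.com/instance/cpu/usage_time"',
            "interval": {"start_time": {"seconds": now - 1200}, "end_time": {"seconds": now}},
            "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL,
            "aggregation": {
                "alignment_period": {"seconds": 300},
                "per_series_aligner": monitoring_v3.Aggregation.Aligner.ALIGN_MEAN,
            },
        }
    )
    for series in results:
        print(series.metric.type, len(series.points))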
- return response - - def create_time_series(self, - request: metric_service.CreateTimeSeriesRequest = None, - *, - name: str = None, - time_series: Sequence[gm_metric.TimeSeries] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Creates or adds data to one or more time series. - The response is empty if all time series in the request - were written. If any time series could not be written, a - corresponding failure message is included in the error - response. - - Args: - request (google.cloud.monitoring_v3.types.CreateTimeSeriesRequest): - The request object. The `CreateTimeSeries` request. - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): - Required. The new data to be added to a list of time - series. Adds at most one data point to each of several - time series. The new data point must be more recent than - any other point in its time series. Each ``TimeSeries`` - value must fully specify a unique time series by - supplying all label values for the metric and the - monitored resource. - - The maximum number of ``TimeSeries`` objects per - ``Create`` request is 200. - - This corresponds to the ``time_series`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, time_series]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a metric_service.CreateTimeSeriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metric_service.CreateTimeSeriesRequest): - request = metric_service.CreateTimeSeriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if time_series is not None: - request.time_series = time_series - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_time_series] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "MetricServiceClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/pagers.py deleted file mode 100644 index fee4ce44..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/pagers.py +++ /dev/null @@ -1,387 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.api import metric_pb2 # type: ignore -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.types import metric as gm_metric -from google.cloud.monitoring_v3.types import metric_service - - -class ListMonitoredResourceDescriptorsPager: - """A pager for iterating through ``list_monitored_resource_descriptors`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``resource_descriptors`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMonitoredResourceDescriptors`` requests and continue to iterate - through the ``resource_descriptors`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metric_service.ListMonitoredResourceDescriptorsResponse], - request: metric_service.ListMonitoredResourceDescriptorsRequest, - response: metric_service.ListMonitoredResourceDescriptorsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = metric_service.ListMonitoredResourceDescriptorsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[metric_service.ListMonitoredResourceDescriptorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: - for page in self.pages: - yield from page.resource_descriptors - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMonitoredResourceDescriptorsAsyncPager: - """A pager for iterating through ``list_monitored_resource_descriptors`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``resource_descriptors`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListMonitoredResourceDescriptors`` requests and continue to iterate - through the ``resource_descriptors`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metric_service.ListMonitoredResourceDescriptorsResponse]], - request: metric_service.ListMonitoredResourceDescriptorsRequest, - response: metric_service.ListMonitoredResourceDescriptorsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metric_service.ListMonitoredResourceDescriptorsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[metric_service.ListMonitoredResourceDescriptorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]: - async def async_generator(): - async for page in self.pages: - for response in page.resource_descriptors: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMetricDescriptorsPager: - """A pager for iterating through ``list_metric_descriptors`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``metric_descriptors`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMetricDescriptors`` requests and continue to iterate - through the ``metric_descriptors`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metric_service.ListMetricDescriptorsResponse], - request: metric_service.ListMetricDescriptorsRequest, - response: metric_service.ListMetricDescriptorsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metric_service.ListMetricDescriptorsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[metric_service.ListMetricDescriptorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[metric_pb2.MetricDescriptor]: - for page in self.pages: - yield from page.metric_descriptors - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMetricDescriptorsAsyncPager: - """A pager for iterating through ``list_metric_descriptors`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``metric_descriptors`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListMetricDescriptors`` requests and continue to iterate - through the ``metric_descriptors`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metric_service.ListMetricDescriptorsResponse]], - request: metric_service.ListMetricDescriptorsRequest, - response: metric_service.ListMetricDescriptorsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest): - The initial request object. 
- response (google.cloud.monitoring_v3.types.ListMetricDescriptorsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metric_service.ListMetricDescriptorsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[metric_service.ListMetricDescriptorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[metric_pb2.MetricDescriptor]: - async def async_generator(): - async for page in self.pages: - for response in page.metric_descriptors: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTimeSeriesPager: - """A pager for iterating through ``list_time_series`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListTimeSeriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``time_series`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTimeSeries`` requests and continue to iterate - through the ``time_series`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListTimeSeriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metric_service.ListTimeSeriesResponse], - request: metric_service.ListTimeSeriesRequest, - response: metric_service.ListTimeSeriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListTimeSeriesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListTimeSeriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metric_service.ListTimeSeriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[metric_service.ListTimeSeriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[gm_metric.TimeSeries]: - for page in self.pages: - yield from page.time_series - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTimeSeriesAsyncPager: - """A pager for iterating through ``list_time_series`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListTimeSeriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``time_series`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTimeSeries`` requests and continue to iterate - through the ``time_series`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListTimeSeriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metric_service.ListTimeSeriesResponse]], - request: metric_service.ListTimeSeriesRequest, - response: metric_service.ListTimeSeriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListTimeSeriesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListTimeSeriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metric_service.ListTimeSeriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[metric_service.ListTimeSeriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[gm_metric.TimeSeries]: - async def async_generator(): - async for page in self.pages: - for response in page.time_series: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/__init__.py deleted file mode 100644 index bbb43274..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MetricServiceTransport -from .grpc import MetricServiceGrpcTransport -from .grpc_asyncio import MetricServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. 
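The pager classes above resolve ``next_page_token`` transparently; a short sketch of the item-level and page-level iteration they provide, with ``my-project`` as a placeholder project id::

    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()

    # Item-level iteration: each element is a google.api.metric_pb2.MetricDescriptor.
    for descriptor in client.list_metric_descriptors(name="projects/my-project"):
        print(descriptor.type)

    # Page-level iteration: each element is a full ListMetricDescriptorsResponse.
    for page in client.list_metric_descriptors(name="projects/my-project").pages:
        print("page with", len(page.metric_descriptors), "descriptors")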
-_transport_registry = OrderedDict() # type: Dict[str, Type[MetricServiceTransport]] -_transport_registry['grpc'] = MetricServiceGrpcTransport -_transport_registry['grpc_asyncio'] = MetricServiceGrpcAsyncIOTransport - -__all__ = ( - 'MetricServiceTransport', - 'MetricServiceGrpcTransport', - 'MetricServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/base.py deleted file mode 100644 index 2c487de9..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/base.py +++ /dev/null @@ -1,308 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import metric_pb2 # type: ignore -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.types import metric_service -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-monitoring', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class MetricServiceTransport(abc.ABC): - """Abstract transport class for MetricService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - 'https://www.googleapis.com/auth/monitoring.write', - ) - - DEFAULT_HOST: str = 'monitoring.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
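The credential handling above is what the public client forwards to; a sketch of supplying explicit service-account credentials instead of application default credentials, where the key path and scope choice are hypothetical::

    from google.cloud import monitoring_v3
    from google.oauth2 import service_account

    credentials = service_account.Credentials.from_service_account_file(
        "/path/to/key.json",
        scopes=["https://www.googleapis.com/auth/monitoring.read"],
    )
    client = monitoring_v3.MetricServiceClient(credentials=credentials)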
- self._wrapped_methods = { - self.list_monitored_resource_descriptors: gapic_v1.method.wrap_method( - self.list_monitored_resource_descriptors, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.get_monitored_resource_descriptor: gapic_v1.method.wrap_method( - self.get_monitored_resource_descriptor, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_metric_descriptors: gapic_v1.method.wrap_method( - self.list_metric_descriptors, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.get_metric_descriptor: gapic_v1.method.wrap_method( - self.get_metric_descriptor, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.create_metric_descriptor: gapic_v1.method.wrap_method( - self.create_metric_descriptor, - default_timeout=12.0, - client_info=client_info, - ), - self.delete_metric_descriptor: gapic_v1.method.wrap_method( - self.delete_metric_descriptor, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_time_series: gapic_v1.method.wrap_method( - self.list_time_series, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=90.0, - ), - default_timeout=90.0, - client_info=client_info, - ), - self.create_time_series: gapic_v1.method.wrap_method( - self.create_time_series, - default_timeout=12.0, - client_info=client_info, - ), - } - - @property - def list_monitored_resource_descriptors(self) -> Callable[ - [metric_service.ListMonitoredResourceDescriptorsRequest], - Union[ - metric_service.ListMonitoredResourceDescriptorsResponse, - Awaitable[metric_service.ListMonitoredResourceDescriptorsResponse] - ]]: - raise NotImplementedError() - - @property - def get_monitored_resource_descriptor(self) -> Callable[ - [metric_service.GetMonitoredResourceDescriptorRequest], - Union[ - monitored_resource_pb2.MonitoredResourceDescriptor, - Awaitable[monitored_resource_pb2.MonitoredResourceDescriptor] - ]]: - raise NotImplementedError() - - @property - def list_metric_descriptors(self) -> Callable[ - [metric_service.ListMetricDescriptorsRequest], - Union[ - metric_service.ListMetricDescriptorsResponse, - Awaitable[metric_service.ListMetricDescriptorsResponse] - ]]: - raise NotImplementedError() - - @property - def get_metric_descriptor(self) -> Callable[ - [metric_service.GetMetricDescriptorRequest], - Union[ - metric_pb2.MetricDescriptor, - Awaitable[metric_pb2.MetricDescriptor] - ]]: - raise NotImplementedError() - - @property - def create_metric_descriptor(self) -> Callable[ - [metric_service.CreateMetricDescriptorRequest], - Union[ - metric_pb2.MetricDescriptor, - 
Awaitable[metric_pb2.MetricDescriptor] - ]]: - raise NotImplementedError() - - @property - def delete_metric_descriptor(self) -> Callable[ - [metric_service.DeleteMetricDescriptorRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_time_series(self) -> Callable[ - [metric_service.ListTimeSeriesRequest], - Union[ - metric_service.ListTimeSeriesResponse, - Awaitable[metric_service.ListTimeSeriesResponse] - ]]: - raise NotImplementedError() - - @property - def create_time_series(self) -> Callable[ - [metric_service.CreateTimeSeriesRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'MetricServiceTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py deleted file mode 100644 index 20143d19..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py +++ /dev/null @@ -1,453 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.api import metric_pb2 # type: ignore -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.types import metric_service -from google.protobuf import empty_pb2 # type: ignore -from .base import MetricServiceTransport, DEFAULT_CLIENT_INFO - - -class MetricServiceGrpcTransport(MetricServiceTransport): - """gRPC backend transport for MetricService. - - Manages metric descriptors, monitored resource descriptors, - and time series data. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
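The defaults wired up in ``_prep_wrapped_messages`` above (retry on ``ServiceUnavailable`` with 30s/90s deadlines) can be overridden per call using the same ``google.api_core`` primitives; a sketch with an illustrative longer deadline and a placeholder project id::

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    custom_retry = retries.Retry(
        initial=0.1,
        maximum=30.0,
        multiplier=1.3,
        deadline=120.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    )
    pager = client.list_metric_descriptors(
        name="projects/my-project",
        retry=custom_retry,
        timeout=120.0,
    )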
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. 
- credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def list_monitored_resource_descriptors(self) -> Callable[ - [metric_service.ListMonitoredResourceDescriptorsRequest], - metric_service.ListMonitoredResourceDescriptorsResponse]: - r"""Return a callable for the list monitored resource - descriptors method over gRPC. - - Lists monitored resource descriptors that match a - filter. This method does not require a Workspace. - - Returns: - Callable[[~.ListMonitoredResourceDescriptorsRequest], - ~.ListMonitoredResourceDescriptorsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/ListMonitoredResourceDescriptors', - request_serializer=metric_service.ListMonitoredResourceDescriptorsRequest.serialize, - response_deserializer=metric_service.ListMonitoredResourceDescriptorsResponse.deserialize, - ) - return self._stubs['list_monitored_resource_descriptors'] - - @property - def get_monitored_resource_descriptor(self) -> Callable[ - [metric_service.GetMonitoredResourceDescriptorRequest], - monitored_resource_pb2.MonitoredResourceDescriptor]: - r"""Return a callable for the get monitored resource - descriptor method over gRPC. - - Gets a single monitored resource descriptor. This - method does not require a Workspace. - - Returns: - Callable[[~.GetMonitoredResourceDescriptorRequest], - ~.MonitoredResourceDescriptor]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_monitored_resource_descriptor' not in self._stubs: - self._stubs['get_monitored_resource_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/GetMonitoredResourceDescriptor', - request_serializer=metric_service.GetMonitoredResourceDescriptorRequest.serialize, - response_deserializer=monitored_resource_pb2.MonitoredResourceDescriptor.FromString, - ) - return self._stubs['get_monitored_resource_descriptor'] - - @property - def list_metric_descriptors(self) -> Callable[ - [metric_service.ListMetricDescriptorsRequest], - metric_service.ListMetricDescriptorsResponse]: - r"""Return a callable for the list metric descriptors method over gRPC. - - Lists metric descriptors that match a filter. This - method does not require a Workspace. - - Returns: - Callable[[~.ListMetricDescriptorsRequest], - ~.ListMetricDescriptorsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_metric_descriptors' not in self._stubs: - self._stubs['list_metric_descriptors'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/ListMetricDescriptors', - request_serializer=metric_service.ListMetricDescriptorsRequest.serialize, - response_deserializer=metric_service.ListMetricDescriptorsResponse.deserialize, - ) - return self._stubs['list_metric_descriptors'] - - @property - def get_metric_descriptor(self) -> Callable[ - [metric_service.GetMetricDescriptorRequest], - metric_pb2.MetricDescriptor]: - r"""Return a callable for the get metric descriptor method over gRPC. - - Gets a single metric descriptor. This method does not - require a Workspace. - - Returns: - Callable[[~.GetMetricDescriptorRequest], - ~.MetricDescriptor]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_metric_descriptor' not in self._stubs: - self._stubs['get_metric_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/GetMetricDescriptor', - request_serializer=metric_service.GetMetricDescriptorRequest.serialize, - response_deserializer=metric_pb2.MetricDescriptor.FromString, - ) - return self._stubs['get_metric_descriptor'] - - @property - def create_metric_descriptor(self) -> Callable[ - [metric_service.CreateMetricDescriptorRequest], - metric_pb2.MetricDescriptor]: - r"""Return a callable for the create metric descriptor method over gRPC. - - Creates a new metric descriptor. User-created metric descriptors - define `custom - metrics `__. - - Returns: - Callable[[~.CreateMetricDescriptorRequest], - ~.MetricDescriptor]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_metric_descriptor' not in self._stubs: - self._stubs['create_metric_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/CreateMetricDescriptor', - request_serializer=metric_service.CreateMetricDescriptorRequest.serialize, - response_deserializer=metric_pb2.MetricDescriptor.FromString, - ) - return self._stubs['create_metric_descriptor'] - - @property - def delete_metric_descriptor(self) -> Callable[ - [metric_service.DeleteMetricDescriptorRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete metric descriptor method over gRPC. - - Deletes a metric descriptor. Only user-created `custom - metrics `__ - can be deleted. - - Returns: - Callable[[~.DeleteMetricDescriptorRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
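A sketch of the create/delete RPCs above applied to a user-defined (custom) metric descriptor; the metric type and project id are placeholders::

    from google.api import metric_pb2 as ga_metric
    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    descriptor = ga_metric.MetricDescriptor(
        type="custom.googleapis.com/my_metric",
        metric_kind=ga_metric.MetricDescriptor.MetricKind.GAUGE,
        value_type=ga_metric.MetricDescriptor.ValueType.DOUBLE,
        description="An example custom metric.",
    )
    descriptor = client.create_metric_descriptor(
        name="projects/my-project", metric_descriptor=descriptor
    )
    # Only user-created (custom) descriptors can be deleted.
    client.delete_metric_descriptor(name=descriptor.name)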
- if 'delete_metric_descriptor' not in self._stubs: - self._stubs['delete_metric_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/DeleteMetricDescriptor', - request_serializer=metric_service.DeleteMetricDescriptorRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_metric_descriptor'] - - @property - def list_time_series(self) -> Callable[ - [metric_service.ListTimeSeriesRequest], - metric_service.ListTimeSeriesResponse]: - r"""Return a callable for the list time series method over gRPC. - - Lists time series that match a filter. This method - does not require a Workspace. - - Returns: - Callable[[~.ListTimeSeriesRequest], - ~.ListTimeSeriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_time_series' not in self._stubs: - self._stubs['list_time_series'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/ListTimeSeries', - request_serializer=metric_service.ListTimeSeriesRequest.serialize, - response_deserializer=metric_service.ListTimeSeriesResponse.deserialize, - ) - return self._stubs['list_time_series'] - - @property - def create_time_series(self) -> Callable[ - [metric_service.CreateTimeSeriesRequest], - empty_pb2.Empty]: - r"""Return a callable for the create time series method over gRPC. - - Creates or adds data to one or more time series. - The response is empty if all time series in the request - were written. If any time series could not be written, a - corresponding failure message is included in the error - response. - - Returns: - Callable[[~.CreateTimeSeriesRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_time_series' not in self._stubs: - self._stubs['create_time_series'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/CreateTimeSeries', - request_serializer=metric_service.CreateTimeSeriesRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['create_time_series'] - - -__all__ = ( - 'MetricServiceGrpcTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py deleted file mode 100644 index 8fc56a06..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,457 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.api import metric_pb2 # type: ignore -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.types import metric_service -from google.protobuf import empty_pb2 # type: ignore -from .base import MetricServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import MetricServiceGrpcTransport - - -class MetricServiceGrpcAsyncIOTransport(MetricServiceTransport): - """gRPC AsyncIO backend transport for MetricService. - - Manages metric descriptors, monitored resource descriptors, - and time series data. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_monitored_resource_descriptors(self) -> Callable[ - [metric_service.ListMonitoredResourceDescriptorsRequest], - Awaitable[metric_service.ListMonitoredResourceDescriptorsResponse]]: - r"""Return a callable for the list monitored resource - descriptors method over gRPC. - - Lists monitored resource descriptors that match a - filter. This method does not require a Workspace. - - Returns: - Callable[[~.ListMonitoredResourceDescriptorsRequest], - Awaitable[~.ListMonitoredResourceDescriptorsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/ListMonitoredResourceDescriptors', - request_serializer=metric_service.ListMonitoredResourceDescriptorsRequest.serialize, - response_deserializer=metric_service.ListMonitoredResourceDescriptorsResponse.deserialize, - ) - return self._stubs['list_monitored_resource_descriptors'] - - @property - def get_monitored_resource_descriptor(self) -> Callable[ - [metric_service.GetMonitoredResourceDescriptorRequest], - Awaitable[monitored_resource_pb2.MonitoredResourceDescriptor]]: - r"""Return a callable for the get monitored resource - descriptor method over gRPC. - - Gets a single monitored resource descriptor. This - method does not require a Workspace. - - Returns: - Callable[[~.GetMonitoredResourceDescriptorRequest], - Awaitable[~.MonitoredResourceDescriptor]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_monitored_resource_descriptor' not in self._stubs: - self._stubs['get_monitored_resource_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/GetMonitoredResourceDescriptor', - request_serializer=metric_service.GetMonitoredResourceDescriptorRequest.serialize, - response_deserializer=monitored_resource_pb2.MonitoredResourceDescriptor.FromString, - ) - return self._stubs['get_monitored_resource_descriptor'] - - @property - def list_metric_descriptors(self) -> Callable[ - [metric_service.ListMetricDescriptorsRequest], - Awaitable[metric_service.ListMetricDescriptorsResponse]]: - r"""Return a callable for the list metric descriptors method over gRPC. - - Lists metric descriptors that match a filter. This - method does not require a Workspace. - - Returns: - Callable[[~.ListMetricDescriptorsRequest], - Awaitable[~.ListMetricDescriptorsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_metric_descriptors' not in self._stubs: - self._stubs['list_metric_descriptors'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/ListMetricDescriptors', - request_serializer=metric_service.ListMetricDescriptorsRequest.serialize, - response_deserializer=metric_service.ListMetricDescriptorsResponse.deserialize, - ) - return self._stubs['list_metric_descriptors'] - - @property - def get_metric_descriptor(self) -> Callable[ - [metric_service.GetMetricDescriptorRequest], - Awaitable[metric_pb2.MetricDescriptor]]: - r"""Return a callable for the get metric descriptor method over gRPC. - - Gets a single metric descriptor. This method does not - require a Workspace. - - Returns: - Callable[[~.GetMetricDescriptorRequest], - Awaitable[~.MetricDescriptor]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
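The asyncio transport above backs ``MetricServiceAsyncClient``; a sketch of the async surface, including the async pager, with ``my-project`` as a placeholder project id::

    import asyncio

    from google.cloud import monitoring_v3

    async def main():
        client = monitoring_v3.MetricServiceAsyncClient()
        pager = await client.list_metric_descriptors(name="projects/my-project")
        async for descriptor in pager:  # ListMetricDescriptorsAsyncPager
            print(descriptor.type)

    asyncio.run(main())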
- if 'get_metric_descriptor' not in self._stubs: - self._stubs['get_metric_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/GetMetricDescriptor', - request_serializer=metric_service.GetMetricDescriptorRequest.serialize, - response_deserializer=metric_pb2.MetricDescriptor.FromString, - ) - return self._stubs['get_metric_descriptor'] - - @property - def create_metric_descriptor(self) -> Callable[ - [metric_service.CreateMetricDescriptorRequest], - Awaitable[metric_pb2.MetricDescriptor]]: - r"""Return a callable for the create metric descriptor method over gRPC. - - Creates a new metric descriptor. User-created metric descriptors - define `custom - metrics `__. - - Returns: - Callable[[~.CreateMetricDescriptorRequest], - Awaitable[~.MetricDescriptor]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_metric_descriptor' not in self._stubs: - self._stubs['create_metric_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/CreateMetricDescriptor', - request_serializer=metric_service.CreateMetricDescriptorRequest.serialize, - response_deserializer=metric_pb2.MetricDescriptor.FromString, - ) - return self._stubs['create_metric_descriptor'] - - @property - def delete_metric_descriptor(self) -> Callable[ - [metric_service.DeleteMetricDescriptorRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete metric descriptor method over gRPC. - - Deletes a metric descriptor. Only user-created `custom - metrics `__ - can be deleted. - - Returns: - Callable[[~.DeleteMetricDescriptorRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_metric_descriptor' not in self._stubs: - self._stubs['delete_metric_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/DeleteMetricDescriptor', - request_serializer=metric_service.DeleteMetricDescriptorRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_metric_descriptor'] - - @property - def list_time_series(self) -> Callable[ - [metric_service.ListTimeSeriesRequest], - Awaitable[metric_service.ListTimeSeriesResponse]]: - r"""Return a callable for the list time series method over gRPC. - - Lists time series that match a filter. This method - does not require a Workspace. - - Returns: - Callable[[~.ListTimeSeriesRequest], - Awaitable[~.ListTimeSeriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
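The create and delete RPCs above operate on ``google.api.MetricDescriptor`` messages. A sketch of defining a custom gauge metric with the synchronous client; the metric type and project ID are placeholders::

    from google.api import metric_pb2 as ga_metric
    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()

    descriptor = ga_metric.MetricDescriptor(
        type="custom.googleapis.com/queue_depth",  # hypothetical custom metric
        metric_kind=ga_metric.MetricDescriptor.MetricKind.GAUGE,
        value_type=ga_metric.MetricDescriptor.ValueType.INT64,
        description="Number of items waiting in the work queue.",
    )
    descriptor = client.create_metric_descriptor(
        name="projects/my-project",  # placeholder project
        metric_descriptor=descriptor,
    )
    print("Created", descriptor.name)

    # Only user-created descriptors may be deleted again:
    # client.delete_metric_descriptor(name=descriptor.name)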
- if 'list_time_series' not in self._stubs: - self._stubs['list_time_series'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/ListTimeSeries', - request_serializer=metric_service.ListTimeSeriesRequest.serialize, - response_deserializer=metric_service.ListTimeSeriesResponse.deserialize, - ) - return self._stubs['list_time_series'] - - @property - def create_time_series(self) -> Callable[ - [metric_service.CreateTimeSeriesRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the create time series method over gRPC. - - Creates or adds data to one or more time series. - The response is empty if all time series in the request - were written. If any time series could not be written, a - corresponding failure message is included in the error - response. - - Returns: - Callable[[~.CreateTimeSeriesRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_time_series' not in self._stubs: - self._stubs['create_time_series'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.MetricService/CreateTimeSeries', - request_serializer=metric_service.CreateTimeSeriesRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['create_time_series'] - - -__all__ = ( - 'MetricServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/__init__.py deleted file mode 100644 index fae0f0b3..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import NotificationChannelServiceClient -from .async_client import NotificationChannelServiceAsyncClient - -__all__ = ( - 'NotificationChannelServiceClient', - 'NotificationChannelServiceAsyncClient', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py deleted file mode 100644 index 710ca1db..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py +++ /dev/null @@ -1,1143 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
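``CreateTimeSeries`` is the write path for such a custom metric. A sketch of appending one point, assuming a placeholder project ID and the hypothetical ``custom.googleapis.com/queue_depth`` metric from the previous sketch::

    import time

    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()

    series = monitoring_v3.TimeSeries()
    series.metric.type = "custom.googleapis.com/queue_depth"  # hypothetical metric
    series.resource.type = "global"

    now = time.time()
    seconds = int(now)
    nanos = int((now - seconds) * 10**9)
    interval = monitoring_v3.TimeInterval(
        {"end_time": {"seconds": seconds, "nanos": nanos}}
    )
    point = monitoring_v3.Point({"interval": interval, "value": {"int64_value": 42}})
    series.points = [point]

    # The response is empty when every series in the request was written.
    client.create_time_series(name="projects/my-project", time_series=[series])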
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import label_pb2 # type: ignore -from google.api import launch_stage_pb2 # type: ignore -from google.cloud.monitoring_v3.services.notification_channel_service import pagers -from google.cloud.monitoring_v3.types import mutation_record -from google.cloud.monitoring_v3.types import notification -from google.cloud.monitoring_v3.types import notification_service -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from .transports.base import NotificationChannelServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import NotificationChannelServiceGrpcAsyncIOTransport -from .client import NotificationChannelServiceClient - - -class NotificationChannelServiceAsyncClient: - """The Notification Channel API provides access to configuration - that controls how messages related to incidents are sent. 
- """ - - _client: NotificationChannelServiceClient - - DEFAULT_ENDPOINT = NotificationChannelServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = NotificationChannelServiceClient.DEFAULT_MTLS_ENDPOINT - - notification_channel_path = staticmethod(NotificationChannelServiceClient.notification_channel_path) - parse_notification_channel_path = staticmethod(NotificationChannelServiceClient.parse_notification_channel_path) - notification_channel_descriptor_path = staticmethod(NotificationChannelServiceClient.notification_channel_descriptor_path) - parse_notification_channel_descriptor_path = staticmethod(NotificationChannelServiceClient.parse_notification_channel_descriptor_path) - common_billing_account_path = staticmethod(NotificationChannelServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(NotificationChannelServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(NotificationChannelServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(NotificationChannelServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(NotificationChannelServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(NotificationChannelServiceClient.parse_common_organization_path) - common_project_path = staticmethod(NotificationChannelServiceClient.common_project_path) - parse_common_project_path = staticmethod(NotificationChannelServiceClient.parse_common_project_path) - common_location_path = staticmethod(NotificationChannelServiceClient.common_location_path) - parse_common_location_path = staticmethod(NotificationChannelServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - NotificationChannelServiceAsyncClient: The constructed client. - """ - return NotificationChannelServiceClient.from_service_account_info.__func__(NotificationChannelServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - NotificationChannelServiceAsyncClient: The constructed client. - """ - return NotificationChannelServiceClient.from_service_account_file.__func__(NotificationChannelServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> NotificationChannelServiceTransport: - """Returns the transport used by the client instance. - - Returns: - NotificationChannelServiceTransport: The transport used by the client instance. 
- """ - return self._client.transport - - get_transport_class = functools.partial(type(NotificationChannelServiceClient).get_transport_class, type(NotificationChannelServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, NotificationChannelServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the notification channel service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.NotificationChannelServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = NotificationChannelServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_notification_channel_descriptors(self, - request: notification_service.ListNotificationChannelDescriptorsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNotificationChannelDescriptorsAsyncPager: - r"""Lists the descriptors for supported channel types. - The use of descriptors makes it possible for new channel - types to be dynamically added. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest`): - The request object. The - `ListNotificationChannelDescriptors` request. - name (:class:`str`): - Required. The REST resource name of the parent from - which to retrieve the notification channel descriptors. - The expected syntax is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - Note that this - `names `__ - the parent container in which to look for the - descriptors; to retrieve a single descriptor by name, - use the - [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] - operation, instead. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.notification_channel_service.pagers.ListNotificationChannelDescriptorsAsyncPager: - The ListNotificationChannelDescriptors response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.ListNotificationChannelDescriptorsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_notification_channel_descriptors, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListNotificationChannelDescriptorsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_notification_channel_descriptor(self, - request: notification_service.GetNotificationChannelDescriptorRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannelDescriptor: - r"""Gets a single channel descriptor. The descriptor - indicates which fields are expected / permitted for a - notification channel of the given type. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetNotificationChannelDescriptorRequest`): - The request object. The - `GetNotificationChannelDescriptor` response. - name (:class:`str`): - Required. The channel type for which to execute the - request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannelDescriptors/[CHANNEL_TYPE] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
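The descriptor listing above returns an async pager whose iteration resolves follow-up pages on demand. A sketch, assuming a placeholder project ID::

    import asyncio

    from google.cloud import monitoring_v3

    async def show_channel_types(project_id: str) -> None:
        client = monitoring_v3.NotificationChannelServiceAsyncClient()
        pager = await client.list_notification_channel_descriptors(
            name=f"projects/{project_id}"
        )
        async for descriptor in pager:
            print(descriptor.display_name, "-", descriptor.description)

    asyncio.run(show_channel_types("my-project"))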
- - Returns: - google.cloud.monitoring_v3.types.NotificationChannelDescriptor: - A description of a notification - channel. The descriptor includes the - properties of the channel and the set of - labels or fields that must be specified - to configure channels of a given type. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.GetNotificationChannelDescriptorRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_notification_channel_descriptor, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_notification_channels(self, - request: notification_service.ListNotificationChannelsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNotificationChannelsAsyncPager: - r"""Lists the notification channels that have been - created for the project. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListNotificationChannelsRequest`): - The request object. The `ListNotificationChannels` - request. - name (:class:`str`): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This names the container in which to look for the - notification channels; it does not name a specific - channel. To query a specific channel by REST resource - name, use the - [``GetNotificationChannel``][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] - operation. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.notification_channel_service.pagers.ListNotificationChannelsAsyncPager: - The ListNotificationChannels response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.ListNotificationChannelsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_notification_channels, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListNotificationChannelsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_notification_channel(self, - request: notification_service.GetNotificationChannelRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannel: - r"""Gets a single notification channel. The channel - includes the relevant configuration details with which - the channel was created. However, the response may - truncate or omit passwords, API keys, or other private - key matter and thus the response may not be 100% - identical to the information that was supplied in the - call to the create method. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetNotificationChannelRequest`): - The request object. The `GetNotificationChannel` - request. - name (:class:`str`): - Required. The channel for which to execute the request. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.NotificationChannel: - A NotificationChannel is a medium through which an alert is - delivered when a policy violation is detected. - Examples of channels include email, SMS, and - third-party messaging applications. Fields containing - sensitive information like authentication tokens or - contact info are only partially populated on - retrieval. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
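Channel listing takes the same ``projects/[PROJECT_ID_OR_NUMBER]`` parent; the request object additionally carries a ``filter``. A sketch with the synchronous client, a placeholder project and an illustrative filter expression::

    from google.cloud import monitoring_v3

    client = monitoring_v3.NotificationChannelServiceClient()
    request = monitoring_v3.ListNotificationChannelsRequest(
        name="projects/my-project",   # placeholder project
        filter='type = "email"',      # illustrative filter: e-mail channels only
    )
    for channel in client.list_notification_channels(request=request):
        print(channel.name, channel.display_name, channel.verification_status)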
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.GetNotificationChannelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_notification_channel, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_notification_channel(self, - request: notification_service.CreateNotificationChannelRequest = None, - *, - name: str = None, - notification_channel: notification.NotificationChannel = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannel: - r"""Creates a new notification channel, representing a - single notification endpoint such as an email address, - SMS number, or PagerDuty service. - - Args: - request (:class:`google.cloud.monitoring_v3.types.CreateNotificationChannelRequest`): - The request object. The `CreateNotificationChannel` - request. - name (:class:`str`): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This names the container into which the channel will be - written, this does not name the newly created channel. - The resulting channel's name will have a normalized - version of this field as a prefix, but will add - ``/notificationChannels/[CHANNEL_ID]`` to identify the - channel. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - notification_channel (:class:`google.cloud.monitoring_v3.types.NotificationChannel`): - Required. The definition of the ``NotificationChannel`` - to create. - - This corresponds to the ``notification_channel`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.NotificationChannel: - A NotificationChannel is a medium through which an alert is - delivered when a policy violation is detected. - Examples of channels include email, SMS, and - third-party messaging applications. Fields containing - sensitive information like authentication tokens or - contact info are only partially populated on - retrieval. - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, notification_channel]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.CreateNotificationChannelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if notification_channel is not None: - request.notification_channel = notification_channel - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_notification_channel, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_notification_channel(self, - request: notification_service.UpdateNotificationChannelRequest = None, - *, - update_mask: field_mask_pb2.FieldMask = None, - notification_channel: notification.NotificationChannel = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannel: - r"""Updates a notification channel. Fields not specified - in the field mask remain unchanged. - - Args: - request (:class:`google.cloud.monitoring_v3.types.UpdateNotificationChannelRequest`): - The request object. The `UpdateNotificationChannel` - request. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - notification_channel (:class:`google.cloud.monitoring_v3.types.NotificationChannel`): - Required. A description of the changes to be applied to - the specified notification channel. The description must - provide a definition for fields to be updated; the names - of these fields should also be included in the - ``update_mask``. - - This corresponds to the ``notification_channel`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.NotificationChannel: - A NotificationChannel is a medium through which an alert is - delivered when a policy violation is detected. - Examples of channels include email, SMS, and - third-party messaging applications. Fields containing - sensitive information like authentication tokens or - contact info are only partially populated on - retrieval. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
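Creating a channel pairs the ``projects/...`` parent with a ``NotificationChannel`` definition; the server assigns the final ``.../notificationChannels/[CHANNEL_ID]`` name. A sketch that creates an e-mail channel with a placeholder project and address (the generated message exposes the proto ``type`` field as ``type_``)::

    from google.cloud import monitoring_v3

    client = monitoring_v3.NotificationChannelServiceClient()

    channel = monitoring_v3.NotificationChannel(
        type_="email",
        display_name="On-call e-mail",
        labels={"email_address": "oncall@example.com"},  # placeholder address
    )
    channel = client.create_notification_channel(
        name="projects/my-project",  # parent container, not the channel name
        notification_channel=channel,
    )
    print(channel.name)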
- has_flattened_params = any([update_mask, notification_channel]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.UpdateNotificationChannelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if update_mask is not None: - request.update_mask = update_mask - if notification_channel is not None: - request.notification_channel = notification_channel - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_notification_channel, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("notification_channel.name", request.notification_channel.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_notification_channel(self, - request: notification_service.DeleteNotificationChannelRequest = None, - *, - name: str = None, - force: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a notification channel. - - Args: - request (:class:`google.cloud.monitoring_v3.types.DeleteNotificationChannelRequest`): - The request object. The `DeleteNotificationChannel` - request. - name (:class:`str`): - Required. The channel for which to execute the request. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (:class:`bool`): - If true, the notification channel - will be deleted regardless of its use in - alert policies (the policies will be - updated to remove the channel). If - false, channels that are still - referenced by an existing alerting - policy will fail to be deleted in a - delete operation. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.DeleteNotificationChannelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if force is not None: - request.force = force - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
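Updates are sparse: only the paths named in ``update_mask`` change, and the channel's own ``name`` routes the request (note the ``notification_channel.name`` routing header above). A sketch with a placeholder channel name::

    from google.protobuf import field_mask_pb2

    from google.cloud import monitoring_v3

    client = monitoring_v3.NotificationChannelServiceClient()

    channel = monitoring_v3.NotificationChannel(
        name="projects/my-project/notificationChannels/1234567890",  # placeholder
        display_name="On-call e-mail (primary)",
    )
    updated = client.update_notification_channel(
        notification_channel=channel,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    print(updated.display_name)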
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_notification_channel, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def send_notification_channel_verification_code(self, - request: notification_service.SendNotificationChannelVerificationCodeRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Causes a verification code to be delivered to the channel. The - code can then be supplied in ``VerifyNotificationChannel`` to - verify the channel. - - Args: - request (:class:`google.cloud.monitoring_v3.types.SendNotificationChannelVerificationCodeRequest`): - The request object. The - `SendNotificationChannelVerificationCode` request. - name (:class:`str`): - Required. The notification channel to - which to send a verification code. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.SendNotificationChannelVerificationCodeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.send_notification_channel_verification_code, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
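Deleting a channel that is still referenced by an alerting policy fails unless ``force`` is set, in which case the referencing policies are edited to drop it. A sketch with placeholder names::

    from google.cloud import monitoring_v3

    client = monitoring_v3.NotificationChannelServiceClient()
    client.delete_notification_channel(
        name="projects/my-project/notificationChannels/1234567890",  # placeholder
        force=True,  # also strip the channel from any alert policies that use it
    )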
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_notification_channel_verification_code(self, - request: notification_service.GetNotificationChannelVerificationCodeRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification_service.GetNotificationChannelVerificationCodeResponse: - r"""Requests a verification code for an already verified - channel that can then be used in a call to - VerifyNotificationChannel() on a different channel with - an equivalent identity in the same or in a different - project. This makes it possible to copy a channel - between projects without requiring manual reverification - of the channel. If the channel is not in the verified - state, this method will fail (in other words, this may - only be used if the - SendNotificationChannelVerificationCode and - VerifyNotificationChannel paths have already been used - to put the given channel into the verified state). - - There is no guarantee that the verification codes - returned by this method will be of a similar structure - or form as the ones that are delivered to the channel - via SendNotificationChannelVerificationCode; while - VerifyNotificationChannel() will recognize both the - codes delivered via - SendNotificationChannelVerificationCode() and returned - from GetNotificationChannelVerificationCode(), it is - typically the case that the verification codes delivered - via - SendNotificationChannelVerificationCode() will be - shorter and also have a shorter expiration (e.g. codes - such as "G-123456") whereas GetVerificationCode() will - typically return a much longer, websafe base 64 encoded - string that has a longer expiration time. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeRequest`): - The request object. The - `GetNotificationChannelVerificationCode` request. - name (:class:`str`): - Required. The notification channel - for which a verification code is to be - generated and retrieved. This must name - a channel that is already verified; if - the specified channel is not verified, - the request will fail. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeResponse: - The GetNotificationChannelVerificationCode request. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.GetNotificationChannelVerificationCodeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_notification_channel_verification_code, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def verify_notification_channel(self, - request: notification_service.VerifyNotificationChannelRequest = None, - *, - name: str = None, - code: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannel: - r"""Verifies a ``NotificationChannel`` by proving receipt of the - code delivered to the channel as a result of calling - ``SendNotificationChannelVerificationCode``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.VerifyNotificationChannelRequest`): - The request object. The `VerifyNotificationChannel` - request. - name (:class:`str`): - Required. The notification channel to - verify. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - code (:class:`str`): - Required. The verification code that was delivered to - the channel as a result of invoking the - ``SendNotificationChannelVerificationCode`` API method - or that was retrieved from a verified channel via - ``GetNotificationChannelVerificationCode``. For example, - one might have "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" - (in general, one is only guaranteed that the code is - valid UTF-8; one should not make any assumptions - regarding the structure or format of the code). - - This corresponds to the ``code`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.NotificationChannel: - A NotificationChannel is a medium through which an alert is - delivered when a policy violation is detected. - Examples of channels include email, SMS, and - third-party messaging applications. Fields containing - sensitive information like authentication tokens or - contact info are only partially populated on - retrieval. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, code]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = notification_service.VerifyNotificationChannelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
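Taken together, the three verification RPCs form the round-trip described above: deliver a short-lived code to the channel, prove receipt of it, and optionally fetch a longer-lived code from the now-verified channel. An async sketch with a placeholder channel and a code assumed to arrive out of band::

    import asyncio

    from google.cloud import monitoring_v3

    async def verify_channel(channel_name: str, code: str) -> None:
        client = monitoring_v3.NotificationChannelServiceAsyncClient()

        # 1. Deliver a short-lived code (e.g. "G-123456") to the channel.
        await client.send_notification_channel_verification_code(name=channel_name)

        # 2. Prove receipt of the code that arrived out of band.
        channel = await client.verify_notification_channel(name=channel_name, code=code)
        print(channel.verification_status)

        # 3. A longer-lived, base64-style code can now be fetched and reused
        #    on an equivalent channel in another project.
        response = await client.get_notification_channel_verification_code(name=channel_name)
        print(response.code)

    asyncio.run(
        verify_channel(
            "projects/my-project/notificationChannels/1234567890",  # placeholder
            "G-123456",                                             # placeholder code
        )
    )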
- if name is not None: - request.name = name - if code is not None: - request.code = code - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.verify_notification_channel, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "NotificationChannelServiceAsyncClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/client.py deleted file mode 100644 index 5ce3fa51..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/client.py +++ /dev/null @@ -1,1301 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import label_pb2 # type: ignore -from google.api import launch_stage_pb2 # type: ignore -from google.cloud.monitoring_v3.services.notification_channel_service import pagers -from google.cloud.monitoring_v3.types import mutation_record -from google.cloud.monitoring_v3.types import notification -from google.cloud.monitoring_v3.types import notification_service -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from .transports.base import NotificationChannelServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import NotificationChannelServiceGrpcTransport -from .transports.grpc_asyncio import NotificationChannelServiceGrpcAsyncIOTransport - - -class NotificationChannelServiceClientMeta(type): - """Metaclass for the NotificationChannelService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[NotificationChannelServiceTransport]] - _transport_registry["grpc"] = NotificationChannelServiceGrpcTransport - _transport_registry["grpc_asyncio"] = NotificationChannelServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[NotificationChannelServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class NotificationChannelServiceClient(metaclass=NotificationChannelServiceClientMeta): - """The Notification Channel API provides access to configuration - that controls how messages related to incidents are sent. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "monitoring.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - NotificationChannelServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - NotificationChannelServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> NotificationChannelServiceTransport: - """Returns the transport used by the client instance. - - Returns: - NotificationChannelServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def notification_channel_path(project: str,notification_channel: str,) -> str: - """Returns a fully-qualified notification_channel string.""" - return "projects/{project}/notificationChannels/{notification_channel}".format(project=project, notification_channel=notification_channel, ) - - @staticmethod - def parse_notification_channel_path(path: str) -> Dict[str,str]: - """Parses a notification_channel path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/notificationChannels/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def notification_channel_descriptor_path(project: str,channel_descriptor: str,) -> str: - """Returns a fully-qualified notification_channel_descriptor string.""" - return "projects/{project}/notificationChannelDescriptors/{channel_descriptor}".format(project=project, channel_descriptor=channel_descriptor, ) - - @staticmethod - def parse_notification_channel_descriptor_path(path: str) -> Dict[str,str]: - """Parses a notification_channel_descriptor path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/notificationChannelDescriptors/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, NotificationChannelServiceTransport, None] = None, - 
client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the notification channel service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, NotificationChannelServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. 
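# Illustrative usage sketch for the endpoint selection just described; the
# endpoint value and env-var settings are examples, and ``ClientOptions`` /
# GOOGLE_API_USE_MTLS_ENDPOINT are the knobs documented above.
import os
from google.api_core.client_options import ClientOptions
from google.cloud import monitoring_v3

os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"  # "always" | "never" | "auto" (default)
options = ClientOptions(api_endpoint="monitoring.googleapis.com")  # takes precedence over the env var
client = monitoring_v3.NotificationChannelServiceClient(client_options=options)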
- # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, NotificationChannelServiceTransport): - # transport is a NotificationChannelServiceTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def list_notification_channel_descriptors(self, - request: notification_service.ListNotificationChannelDescriptorsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNotificationChannelDescriptorsPager: - r"""Lists the descriptors for supported channel types. - The use of descriptors makes it possible for new channel - types to be dynamically added. - - Args: - request (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest): - The request object. The - `ListNotificationChannelDescriptors` request. - name (str): - Required. The REST resource name of the parent from - which to retrieve the notification channel descriptors. - The expected syntax is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - Note that this - `names `__ - the parent container in which to look for the - descriptors; to retrieve a single descriptor by name, - use the - [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] - operation, instead. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.notification_channel_service.pagers.ListNotificationChannelDescriptorsPager: - The ListNotificationChannelDescriptors response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.ListNotificationChannelDescriptorsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
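# Illustrative usage sketch for list_notification_channel_descriptors; the
# project ID is a placeholder and the pager resolves additional pages as it is
# iterated, per the docstring above.
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
parent = client.common_project_path("my-project")  # "projects/my-project"
for descriptor in client.list_notification_channel_descriptors(name=parent):
    print(descriptor.type_, descriptor.display_name)  # fields on NotificationChannelDescriptor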
- if not isinstance(request, notification_service.ListNotificationChannelDescriptorsRequest): - request = notification_service.ListNotificationChannelDescriptorsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_notification_channel_descriptors] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListNotificationChannelDescriptorsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_notification_channel_descriptor(self, - request: notification_service.GetNotificationChannelDescriptorRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannelDescriptor: - r"""Gets a single channel descriptor. The descriptor - indicates which fields are expected / permitted for a - notification channel of the given type. - - Args: - request (google.cloud.monitoring_v3.types.GetNotificationChannelDescriptorRequest): - The request object. The - `GetNotificationChannelDescriptor` response. - name (str): - Required. The channel type for which to execute the - request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannelDescriptors/[CHANNEL_TYPE] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.NotificationChannelDescriptor: - A description of a notification - channel. The descriptor includes the - properties of the channel and the set of - labels or fields that must be specified - to configure channels of a given type. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.GetNotificationChannelDescriptorRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, notification_service.GetNotificationChannelDescriptorRequest): - request = notification_service.GetNotificationChannelDescriptorRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
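# Illustrative usage sketch for get_notification_channel_descriptor, built with
# the notification_channel_descriptor_path helper defined on this client; the
# project ID and "email" channel type are placeholders.
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
name = client.notification_channel_descriptor_path("my-project", "email")
descriptor = client.get_notification_channel_descriptor(name=name)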
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_notification_channel_descriptor] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_notification_channels(self, - request: notification_service.ListNotificationChannelsRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNotificationChannelsPager: - r"""Lists the notification channels that have been - created for the project. - - Args: - request (google.cloud.monitoring_v3.types.ListNotificationChannelsRequest): - The request object. The `ListNotificationChannels` - request. - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This names the container in which to look for the - notification channels; it does not name a specific - channel. To query a specific channel by REST resource - name, use the - [``GetNotificationChannel``][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] - operation. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.notification_channel_service.pagers.ListNotificationChannelsPager: - The ListNotificationChannels response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.ListNotificationChannelsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, notification_service.ListNotificationChannelsRequest): - request = notification_service.ListNotificationChannelsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_notification_channels] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
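# Illustrative usage sketch for list_notification_channels; the project ID is a
# placeholder, and iterating the pager yields NotificationChannel messages.
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
pager = client.list_notification_channels(name="projects/my-project")
for channel in pager:
    print(channel.name, channel.type_)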
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListNotificationChannelsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_notification_channel(self, - request: notification_service.GetNotificationChannelRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannel: - r"""Gets a single notification channel. The channel - includes the relevant configuration details with which - the channel was created. However, the response may - truncate or omit passwords, API keys, or other private - key matter and thus the response may not be 100% - identical to the information that was supplied in the - call to the create method. - - Args: - request (google.cloud.monitoring_v3.types.GetNotificationChannelRequest): - The request object. The `GetNotificationChannel` - request. - name (str): - Required. The channel for which to execute the request. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.NotificationChannel: - A NotificationChannel is a medium through which an alert is - delivered when a policy violation is detected. - Examples of channels include email, SMS, and - third-party messaging applications. Fields containing - sensitive information like authentication tokens or - contact info are only partially populated on - retrieval. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.GetNotificationChannelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, notification_service.GetNotificationChannelRequest): - request = notification_service.GetNotificationChannelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_notification_channel] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
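# Illustrative usage sketch for get_notification_channel, using the
# notification_channel_path helper; IDs are placeholders, and sensitive label
# values may come back partially redacted, as the docstring above notes.
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
channel_name = client.notification_channel_path("my-project", "1234567890")
channel = client.get_notification_channel(name=channel_name)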
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_notification_channel(self, - request: notification_service.CreateNotificationChannelRequest = None, - *, - name: str = None, - notification_channel: notification.NotificationChannel = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannel: - r"""Creates a new notification channel, representing a - single notification endpoint such as an email address, - SMS number, or PagerDuty service. - - Args: - request (google.cloud.monitoring_v3.types.CreateNotificationChannelRequest): - The request object. The `CreateNotificationChannel` - request. - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This names the container into which the channel will be - written, this does not name the newly created channel. - The resulting channel's name will have a normalized - version of this field as a prefix, but will add - ``/notificationChannels/[CHANNEL_ID]`` to identify the - channel. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - notification_channel (google.cloud.monitoring_v3.types.NotificationChannel): - Required. The definition of the ``NotificationChannel`` - to create. - - This corresponds to the ``notification_channel`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.NotificationChannel: - A NotificationChannel is a medium through which an alert is - delivered when a policy violation is detected. - Examples of channels include email, SMS, and - third-party messaging applications. Fields containing - sensitive information like authentication tokens or - contact info are only partially populated on - retrieval. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, notification_channel]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.CreateNotificationChannelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, notification_service.CreateNotificationChannelRequest): - request = notification_service.CreateNotificationChannelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if notification_channel is not None: - request.notification_channel = notification_channel - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
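# Illustrative usage sketch for create_notification_channel. ``type_``,
# ``display_name`` and ``labels`` are fields on the NotificationChannel message
# in this package; the "email_address" label key for the email channel type is
# an assumption, and all IDs are placeholders.
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
channel = monitoring_v3.NotificationChannel(
    type_="email",
    display_name="On-call alerts",
    labels={"email_address": "oncall@example.com"},
)
created = client.create_notification_channel(
    name="projects/my-project", notification_channel=channel,
)
print(created.name)  # projects/my-project/notificationChannels/[CHANNEL_ID]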
- rpc = self._transport._wrapped_methods[self._transport.create_notification_channel] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_notification_channel(self, - request: notification_service.UpdateNotificationChannelRequest = None, - *, - update_mask: field_mask_pb2.FieldMask = None, - notification_channel: notification.NotificationChannel = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannel: - r"""Updates a notification channel. Fields not specified - in the field mask remain unchanged. - - Args: - request (google.cloud.monitoring_v3.types.UpdateNotificationChannelRequest): - The request object. The `UpdateNotificationChannel` - request. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - notification_channel (google.cloud.monitoring_v3.types.NotificationChannel): - Required. A description of the changes to be applied to - the specified notification channel. The description must - provide a definition for fields to be updated; the names - of these fields should also be included in the - ``update_mask``. - - This corresponds to the ``notification_channel`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.NotificationChannel: - A NotificationChannel is a medium through which an alert is - delivered when a policy violation is detected. - Examples of channels include email, SMS, and - third-party messaging applications. Fields containing - sensitive information like authentication tokens or - contact info are only partially populated on - retrieval. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([update_mask, notification_channel]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.UpdateNotificationChannelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, notification_service.UpdateNotificationChannelRequest): - request = notification_service.UpdateNotificationChannelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
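# Illustrative usage sketch for update_notification_channel: the FieldMask
# restricts the update to display_name, matching the docstring above; the
# channel name is a placeholder.
from google.cloud import monitoring_v3
from google.protobuf import field_mask_pb2

client = monitoring_v3.NotificationChannelServiceClient()
channel = client.get_notification_channel(
    name="projects/my-project/notificationChannels/1234567890")
channel.display_name = "Secondary on-call"
client.update_notification_channel(
    notification_channel=channel,
    update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
)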
- if update_mask is not None: - request.update_mask = update_mask - if notification_channel is not None: - request.notification_channel = notification_channel - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_notification_channel] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("notification_channel.name", request.notification_channel.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_notification_channel(self, - request: notification_service.DeleteNotificationChannelRequest = None, - *, - name: str = None, - force: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a notification channel. - - Args: - request (google.cloud.monitoring_v3.types.DeleteNotificationChannelRequest): - The request object. The `DeleteNotificationChannel` - request. - name (str): - Required. The channel for which to execute the request. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (bool): - If true, the notification channel - will be deleted regardless of its use in - alert policies (the policies will be - updated to remove the channel). If - false, channels that are still - referenced by an existing alerting - policy will fail to be deleted in a - delete operation. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.DeleteNotificationChannelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, notification_service.DeleteNotificationChannelRequest): - request = notification_service.DeleteNotificationChannelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if force is not None: - request.force = force - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_notification_channel] - - # Certain fields should be provided within the metadata header; - # add these here. 
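# Illustrative usage sketch for delete_notification_channel; force=True also
# removes the channel from alert policies that still reference it, as described
# above. The channel name is a placeholder.
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
client.delete_notification_channel(
    name="projects/my-project/notificationChannels/1234567890", force=True,
)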
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def send_notification_channel_verification_code(self, - request: notification_service.SendNotificationChannelVerificationCodeRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Causes a verification code to be delivered to the channel. The - code can then be supplied in ``VerifyNotificationChannel`` to - verify the channel. - - Args: - request (google.cloud.monitoring_v3.types.SendNotificationChannelVerificationCodeRequest): - The request object. The - `SendNotificationChannelVerificationCode` request. - name (str): - Required. The notification channel to - which to send a verification code. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.SendNotificationChannelVerificationCodeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, notification_service.SendNotificationChannelVerificationCodeRequest): - request = notification_service.SendNotificationChannelVerificationCodeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.send_notification_channel_verification_code] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_notification_channel_verification_code(self, - request: notification_service.GetNotificationChannelVerificationCodeRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification_service.GetNotificationChannelVerificationCodeResponse: - r"""Requests a verification code for an already verified - channel that can then be used in a call to - VerifyNotificationChannel() on a different channel with - an equivalent identity in the same or in a different - project. This makes it possible to copy a channel - between projects without requiring manual reverification - of the channel. 
If the channel is not in the verified - state, this method will fail (in other words, this may - only be used if the - SendNotificationChannelVerificationCode and - VerifyNotificationChannel paths have already been used - to put the given channel into the verified state). - - There is no guarantee that the verification codes - returned by this method will be of a similar structure - or form as the ones that are delivered to the channel - via SendNotificationChannelVerificationCode; while - VerifyNotificationChannel() will recognize both the - codes delivered via - SendNotificationChannelVerificationCode() and returned - from GetNotificationChannelVerificationCode(), it is - typically the case that the verification codes delivered - via - SendNotificationChannelVerificationCode() will be - shorter and also have a shorter expiration (e.g. codes - such as "G-123456") whereas GetVerificationCode() will - typically return a much longer, websafe base 64 encoded - string that has a longer expiration time. - - Args: - request (google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeRequest): - The request object. The - `GetNotificationChannelVerificationCode` request. - name (str): - Required. The notification channel - for which a verification code is to be - generated and retrieved. This must name - a channel that is already verified; if - the specified channel is not verified, - the request will fail. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeResponse: - The GetNotificationChannelVerificationCode request. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.GetNotificationChannelVerificationCodeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, notification_service.GetNotificationChannelVerificationCodeRequest): - request = notification_service.GetNotificationChannelVerificationCodeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_notification_channel_verification_code] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def verify_notification_channel(self, - request: notification_service.VerifyNotificationChannelRequest = None, - *, - name: str = None, - code: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> notification.NotificationChannel: - r"""Verifies a ``NotificationChannel`` by proving receipt of the - code delivered to the channel as a result of calling - ``SendNotificationChannelVerificationCode``. - - Args: - request (google.cloud.monitoring_v3.types.VerifyNotificationChannelRequest): - The request object. The `VerifyNotificationChannel` - request. - name (str): - Required. The notification channel to - verify. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - code (str): - Required. The verification code that was delivered to - the channel as a result of invoking the - ``SendNotificationChannelVerificationCode`` API method - or that was retrieved from a verified channel via - ``GetNotificationChannelVerificationCode``. For example, - one might have "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" - (in general, one is only guaranteed that the code is - valid UTF-8; one should not make any assumptions - regarding the structure or format of the code). - - This corresponds to the ``code`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.NotificationChannel: - A NotificationChannel is a medium through which an alert is - delivered when a policy violation is detected. - Examples of channels include email, SMS, and - third-party messaging applications. Fields containing - sensitive information like authentication tokens or - contact info are only partially populated on - retrieval. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, code]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a notification_service.VerifyNotificationChannelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, notification_service.VerifyNotificationChannelRequest): - request = notification_service.VerifyNotificationChannelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if code is not None: - request.code = code - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.verify_notification_channel] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
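# Illustrative usage sketch of the verification flow described above: deliver a
# code to the channel, then prove receipt with verify_notification_channel().
# The channel name and the "G-123456" code are placeholders.
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
channel_name = "projects/my-project/notificationChannels/1234567890"
client.send_notification_channel_verification_code(name=channel_name)
verified = client.verify_notification_channel(name=channel_name, code="G-123456")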
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "NotificationChannelServiceClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/pagers.py deleted file mode 100644 index 82f80120..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/pagers.py +++ /dev/null @@ -1,263 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.monitoring_v3.types import notification -from google.cloud.monitoring_v3.types import notification_service - - -class ListNotificationChannelDescriptorsPager: - """A pager for iterating through ``list_notification_channel_descriptors`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``channel_descriptors`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListNotificationChannelDescriptors`` requests and continue to iterate - through the ``channel_descriptors`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., notification_service.ListNotificationChannelDescriptorsResponse], - request: notification_service.ListNotificationChannelDescriptorsRequest, - response: notification_service.ListNotificationChannelDescriptorsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = notification_service.ListNotificationChannelDescriptorsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[notification_service.ListNotificationChannelDescriptorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[notification.NotificationChannelDescriptor]: - for page in self.pages: - yield from page.channel_descriptors - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListNotificationChannelDescriptorsAsyncPager: - """A pager for iterating through ``list_notification_channel_descriptors`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``channel_descriptors`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListNotificationChannelDescriptors`` requests and continue to iterate - through the ``channel_descriptors`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[notification_service.ListNotificationChannelDescriptorsResponse]], - request: notification_service.ListNotificationChannelDescriptorsRequest, - response: notification_service.ListNotificationChannelDescriptorsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = notification_service.ListNotificationChannelDescriptorsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[notification_service.ListNotificationChannelDescriptorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[notification.NotificationChannelDescriptor]: - async def async_generator(): - async for page in self.pages: - for response in page.channel_descriptors: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListNotificationChannelsPager: - """A pager for iterating through ``list_notification_channels`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListNotificationChannelsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``notification_channels`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListNotificationChannels`` requests and continue to iterate - through the ``notification_channels`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListNotificationChannelsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., notification_service.ListNotificationChannelsResponse], - request: notification_service.ListNotificationChannelsRequest, - response: notification_service.ListNotificationChannelsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListNotificationChannelsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListNotificationChannelsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = notification_service.ListNotificationChannelsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[notification_service.ListNotificationChannelsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[notification.NotificationChannel]: - for page in self.pages: - yield from page.notification_channels - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListNotificationChannelsAsyncPager: - """A pager for iterating through ``list_notification_channels`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListNotificationChannelsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``notification_channels`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListNotificationChannels`` requests and continue to iterate - through the ``notification_channels`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListNotificationChannelsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[notification_service.ListNotificationChannelsResponse]], - request: notification_service.ListNotificationChannelsRequest, - response: notification_service.ListNotificationChannelsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListNotificationChannelsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListNotificationChannelsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = notification_service.ListNotificationChannelsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[notification_service.ListNotificationChannelsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[notification.NotificationChannel]: - async def async_generator(): - async for page in self.pages: - for response in page.notification_channels: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/__init__.py deleted file mode 100644 index 363051c8..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
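# Illustrative usage sketch for the pager classes above: attribute access is
# proxied to the most recent response via __getattr__, and the ``pages``
# property yields one response object per page. The project ID is a placeholder.
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
pager = client.list_notification_channel_descriptors(name="projects/my-project")
for page in pager.pages:  # each page is a ListNotificationChannelDescriptorsResponse
    print(page.next_page_token, len(page.channel_descriptors))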
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import NotificationChannelServiceTransport -from .grpc import NotificationChannelServiceGrpcTransport -from .grpc_asyncio import NotificationChannelServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[NotificationChannelServiceTransport]] -_transport_registry['grpc'] = NotificationChannelServiceGrpcTransport -_transport_registry['grpc_asyncio'] = NotificationChannelServiceGrpcAsyncIOTransport - -__all__ = ( - 'NotificationChannelServiceTransport', - 'NotificationChannelServiceGrpcTransport', - 'NotificationChannelServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py deleted file mode 100644 index 046be367..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py +++ /dev/null @@ -1,340 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.types import notification -from google.cloud.monitoring_v3.types import notification_service -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-monitoring', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class NotificationChannelServiceTransport(abc.ABC): - """Abstract transport class for NotificationChannelService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ) - - DEFAULT_HOST: str = 'monitoring.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - 
scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
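# Illustrative usage sketch for the transport arguments above: a transport can
# be built with a credentials file and narrowed scopes, then handed to the
# client, which (per the client code earlier in this diff) must not also be
# given credentials of its own. It is assumed here that the gRPC transport
# accepts the same credential arguments as this base class; paths are placeholders.
from google.cloud import monitoring_v3
from google.cloud.monitoring_v3.services.notification_channel_service.transports import (
    NotificationChannelServiceGrpcTransport,
)

transport = NotificationChannelServiceGrpcTransport(
    credentials_file="service-account.json",
    scopes=["https://www.googleapis.com/auth/monitoring.read"],
)
client = monitoring_v3.NotificationChannelServiceClient(transport=transport)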
- self._wrapped_methods = { - self.list_notification_channel_descriptors: gapic_v1.method.wrap_method( - self.list_notification_channel_descriptors, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.get_notification_channel_descriptor: gapic_v1.method.wrap_method( - self.get_notification_channel_descriptor, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_notification_channels: gapic_v1.method.wrap_method( - self.list_notification_channels, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.get_notification_channel: gapic_v1.method.wrap_method( - self.get_notification_channel, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.create_notification_channel: gapic_v1.method.wrap_method( - self.create_notification_channel, - default_timeout=30.0, - client_info=client_info, - ), - self.update_notification_channel: gapic_v1.method.wrap_method( - self.update_notification_channel, - default_timeout=30.0, - client_info=client_info, - ), - self.delete_notification_channel: gapic_v1.method.wrap_method( - self.delete_notification_channel, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.send_notification_channel_verification_code: gapic_v1.method.wrap_method( - self.send_notification_channel_verification_code, - default_timeout=30.0, - client_info=client_info, - ), - self.get_notification_channel_verification_code: gapic_v1.method.wrap_method( - self.get_notification_channel_verification_code, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.verify_notification_channel: gapic_v1.method.wrap_method( - self.verify_notification_channel, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - } - - @property - def list_notification_channel_descriptors(self) -> Callable[ - [notification_service.ListNotificationChannelDescriptorsRequest], - Union[ - notification_service.ListNotificationChannelDescriptorsResponse, - Awaitable[notification_service.ListNotificationChannelDescriptorsResponse] - ]]: - raise NotImplementedError() - - @property - def get_notification_channel_descriptor(self) -> Callable[ - [notification_service.GetNotificationChannelDescriptorRequest], - Union[ - notification.NotificationChannelDescriptor, - Awaitable[notification.NotificationChannelDescriptor] - ]]: - raise NotImplementedError() - - @property - 
def list_notification_channels(self) -> Callable[ - [notification_service.ListNotificationChannelsRequest], - Union[ - notification_service.ListNotificationChannelsResponse, - Awaitable[notification_service.ListNotificationChannelsResponse] - ]]: - raise NotImplementedError() - - @property - def get_notification_channel(self) -> Callable[ - [notification_service.GetNotificationChannelRequest], - Union[ - notification.NotificationChannel, - Awaitable[notification.NotificationChannel] - ]]: - raise NotImplementedError() - - @property - def create_notification_channel(self) -> Callable[ - [notification_service.CreateNotificationChannelRequest], - Union[ - notification.NotificationChannel, - Awaitable[notification.NotificationChannel] - ]]: - raise NotImplementedError() - - @property - def update_notification_channel(self) -> Callable[ - [notification_service.UpdateNotificationChannelRequest], - Union[ - notification.NotificationChannel, - Awaitable[notification.NotificationChannel] - ]]: - raise NotImplementedError() - - @property - def delete_notification_channel(self) -> Callable[ - [notification_service.DeleteNotificationChannelRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def send_notification_channel_verification_code(self) -> Callable[ - [notification_service.SendNotificationChannelVerificationCodeRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_notification_channel_verification_code(self) -> Callable[ - [notification_service.GetNotificationChannelVerificationCodeRequest], - Union[ - notification_service.GetNotificationChannelVerificationCodeResponse, - Awaitable[notification_service.GetNotificationChannelVerificationCodeResponse] - ]]: - raise NotImplementedError() - - @property - def verify_notification_channel(self) -> Callable[ - [notification_service.VerifyNotificationChannelRequest], - Union[ - notification.NotificationChannel, - Awaitable[notification.NotificationChannel] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'NotificationChannelServiceTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py deleted file mode 100644 index b29d26ab..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py +++ /dev/null @@ -1,538 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
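For reference, a minimal usage sketch of the notification-channel client whose transport base is defined above, showing how the default retry/timeout settings wired into the wrapped methods can be overridden per call. This is an illustration only; it assumes google-cloud-monitoring is installed, application default credentials are available, and "my-project" is a placeholder project ID.

    # Minimal sketch: override the transport's default retry/timeout per call.
    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import monitoring_v3

    client = monitoring_v3.NotificationChannelServiceClient()

    # The transport configures default retries (initial=0.1s, maximum=30s,
    # multiplier=1.3, retrying on ServiceUnavailable); callers may still pass
    # their own retry and timeout for a single call.
    custom_retry = retries.Retry(
        initial=0.2,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=20.0,
    )

    for descriptor in client.list_notification_channel_descriptors(
        name="projects/my-project",
        retry=custom_retry,
        timeout=20.0,
    ):
        print(descriptor.type, descriptor.display_name)
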
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.monitoring_v3.types import notification -from google.cloud.monitoring_v3.types import notification_service -from google.protobuf import empty_pb2 # type: ignore -from .base import NotificationChannelServiceTransport, DEFAULT_CLIENT_INFO - - -class NotificationChannelServiceGrpcTransport(NotificationChannelServiceTransport): - """gRPC backend transport for NotificationChannelService. - - The Notification Channel API provides access to configuration - that controls how messages related to incidents are sent. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. 
It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def list_notification_channel_descriptors(self) -> Callable[ - [notification_service.ListNotificationChannelDescriptorsRequest], - notification_service.ListNotificationChannelDescriptorsResponse]: - r"""Return a callable for the list notification channel - descriptors method over gRPC. - - Lists the descriptors for supported channel types. - The use of descriptors makes it possible for new channel - types to be dynamically added. - - Returns: - Callable[[~.ListNotificationChannelDescriptorsRequest], - ~.ListNotificationChannelDescriptorsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_notification_channel_descriptors' not in self._stubs: - self._stubs['list_notification_channel_descriptors'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/ListNotificationChannelDescriptors', - request_serializer=notification_service.ListNotificationChannelDescriptorsRequest.serialize, - response_deserializer=notification_service.ListNotificationChannelDescriptorsResponse.deserialize, - ) - return self._stubs['list_notification_channel_descriptors'] - - @property - def get_notification_channel_descriptor(self) -> Callable[ - [notification_service.GetNotificationChannelDescriptorRequest], - notification.NotificationChannelDescriptor]: - r"""Return a callable for the get notification channel - descriptor method over gRPC. - - Gets a single channel descriptor. The descriptor - indicates which fields are expected / permitted for a - notification channel of the given type. - - Returns: - Callable[[~.GetNotificationChannelDescriptorRequest], - ~.NotificationChannelDescriptor]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_notification_channel_descriptor' not in self._stubs: - self._stubs['get_notification_channel_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/GetNotificationChannelDescriptor', - request_serializer=notification_service.GetNotificationChannelDescriptorRequest.serialize, - response_deserializer=notification.NotificationChannelDescriptor.deserialize, - ) - return self._stubs['get_notification_channel_descriptor'] - - @property - def list_notification_channels(self) -> Callable[ - [notification_service.ListNotificationChannelsRequest], - notification_service.ListNotificationChannelsResponse]: - r"""Return a callable for the list notification channels method over gRPC. - - Lists the notification channels that have been - created for the project. - - Returns: - Callable[[~.ListNotificationChannelsRequest], - ~.ListNotificationChannelsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_notification_channels' not in self._stubs: - self._stubs['list_notification_channels'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/ListNotificationChannels', - request_serializer=notification_service.ListNotificationChannelsRequest.serialize, - response_deserializer=notification_service.ListNotificationChannelsResponse.deserialize, - ) - return self._stubs['list_notification_channels'] - - @property - def get_notification_channel(self) -> Callable[ - [notification_service.GetNotificationChannelRequest], - notification.NotificationChannel]: - r"""Return a callable for the get notification channel method over gRPC. - - Gets a single notification channel. The channel - includes the relevant configuration details with which - the channel was created. However, the response may - truncate or omit passwords, API keys, or other private - key matter and thus the response may not be 100% - identical to the information that was supplied in the - call to the create method. - - Returns: - Callable[[~.GetNotificationChannelRequest], - ~.NotificationChannel]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_notification_channel' not in self._stubs: - self._stubs['get_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/GetNotificationChannel', - request_serializer=notification_service.GetNotificationChannelRequest.serialize, - response_deserializer=notification.NotificationChannel.deserialize, - ) - return self._stubs['get_notification_channel'] - - @property - def create_notification_channel(self) -> Callable[ - [notification_service.CreateNotificationChannelRequest], - notification.NotificationChannel]: - r"""Return a callable for the create notification channel method over gRPC. - - Creates a new notification channel, representing a - single notification endpoint such as an email address, - SMS number, or PagerDuty service. - - Returns: - Callable[[~.CreateNotificationChannelRequest], - ~.NotificationChannel]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_notification_channel' not in self._stubs: - self._stubs['create_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/CreateNotificationChannel', - request_serializer=notification_service.CreateNotificationChannelRequest.serialize, - response_deserializer=notification.NotificationChannel.deserialize, - ) - return self._stubs['create_notification_channel'] - - @property - def update_notification_channel(self) -> Callable[ - [notification_service.UpdateNotificationChannelRequest], - notification.NotificationChannel]: - r"""Return a callable for the update notification channel method over gRPC. - - Updates a notification channel. Fields not specified - in the field mask remain unchanged. - - Returns: - Callable[[~.UpdateNotificationChannelRequest], - ~.NotificationChannel]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_notification_channel' not in self._stubs: - self._stubs['update_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/UpdateNotificationChannel', - request_serializer=notification_service.UpdateNotificationChannelRequest.serialize, - response_deserializer=notification.NotificationChannel.deserialize, - ) - return self._stubs['update_notification_channel'] - - @property - def delete_notification_channel(self) -> Callable[ - [notification_service.DeleteNotificationChannelRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete notification channel method over gRPC. - - Deletes a notification channel. - - Returns: - Callable[[~.DeleteNotificationChannelRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_notification_channel' not in self._stubs: - self._stubs['delete_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/DeleteNotificationChannel', - request_serializer=notification_service.DeleteNotificationChannelRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_notification_channel'] - - @property - def send_notification_channel_verification_code(self) -> Callable[ - [notification_service.SendNotificationChannelVerificationCodeRequest], - empty_pb2.Empty]: - r"""Return a callable for the send notification channel - verification code method over gRPC. - - Causes a verification code to be delivered to the channel. The - code can then be supplied in ``VerifyNotificationChannel`` to - verify the channel. - - Returns: - Callable[[~.SendNotificationChannelVerificationCodeRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'send_notification_channel_verification_code' not in self._stubs: - self._stubs['send_notification_channel_verification_code'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/SendNotificationChannelVerificationCode', - request_serializer=notification_service.SendNotificationChannelVerificationCodeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['send_notification_channel_verification_code'] - - @property - def get_notification_channel_verification_code(self) -> Callable[ - [notification_service.GetNotificationChannelVerificationCodeRequest], - notification_service.GetNotificationChannelVerificationCodeResponse]: - r"""Return a callable for the get notification channel - verification code method over gRPC. - - Requests a verification code for an already verified - channel that can then be used in a call to - VerifyNotificationChannel() on a different channel with - an equivalent identity in the same or in a different - project. This makes it possible to copy a channel - between projects without requiring manual reverification - of the channel. If the channel is not in the verified - state, this method will fail (in other words, this may - only be used if the - SendNotificationChannelVerificationCode and - VerifyNotificationChannel paths have already been used - to put the given channel into the verified state). - - There is no guarantee that the verification codes - returned by this method will be of a similar structure - or form as the ones that are delivered to the channel - via SendNotificationChannelVerificationCode; while - VerifyNotificationChannel() will recognize both the - codes delivered via - SendNotificationChannelVerificationCode() and returned - from GetNotificationChannelVerificationCode(), it is - typically the case that the verification codes delivered - via - SendNotificationChannelVerificationCode() will be - shorter and also have a shorter expiration (e.g. codes - such as "G-123456") whereas GetVerificationCode() will - typically return a much longer, websafe base 64 encoded - string that has a longer expiration time. - - Returns: - Callable[[~.GetNotificationChannelVerificationCodeRequest], - ~.GetNotificationChannelVerificationCodeResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_notification_channel_verification_code' not in self._stubs: - self._stubs['get_notification_channel_verification_code'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/GetNotificationChannelVerificationCode', - request_serializer=notification_service.GetNotificationChannelVerificationCodeRequest.serialize, - response_deserializer=notification_service.GetNotificationChannelVerificationCodeResponse.deserialize, - ) - return self._stubs['get_notification_channel_verification_code'] - - @property - def verify_notification_channel(self) -> Callable[ - [notification_service.VerifyNotificationChannelRequest], - notification.NotificationChannel]: - r"""Return a callable for the verify notification channel method over gRPC. - - Verifies a ``NotificationChannel`` by proving receipt of the - code delivered to the channel as a result of calling - ``SendNotificationChannelVerificationCode``. 
- - Returns: - Callable[[~.VerifyNotificationChannelRequest], - ~.NotificationChannel]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'verify_notification_channel' not in self._stubs: - self._stubs['verify_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/VerifyNotificationChannel', - request_serializer=notification_service.VerifyNotificationChannelRequest.serialize, - response_deserializer=notification.NotificationChannel.deserialize, - ) - return self._stubs['verify_notification_channel'] - - -__all__ = ( - 'NotificationChannelServiceGrpcTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py deleted file mode 100644 index f17cc7c2..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,542 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.monitoring_v3.types import notification -from google.cloud.monitoring_v3.types import notification_service -from google.protobuf import empty_pb2 # type: ignore -from .base import NotificationChannelServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import NotificationChannelServiceGrpcTransport - - -class NotificationChannelServiceGrpcAsyncIOTransport(NotificationChannelServiceTransport): - """gRPC AsyncIO backend transport for NotificationChannelService. - - The Notification Channel API provides access to configuration - that controls how messages related to incidents are sent. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. 
- return self._grpc_channel - - @property - def list_notification_channel_descriptors(self) -> Callable[ - [notification_service.ListNotificationChannelDescriptorsRequest], - Awaitable[notification_service.ListNotificationChannelDescriptorsResponse]]: - r"""Return a callable for the list notification channel - descriptors method over gRPC. - - Lists the descriptors for supported channel types. - The use of descriptors makes it possible for new channel - types to be dynamically added. - - Returns: - Callable[[~.ListNotificationChannelDescriptorsRequest], - Awaitable[~.ListNotificationChannelDescriptorsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_notification_channel_descriptors' not in self._stubs: - self._stubs['list_notification_channel_descriptors'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/ListNotificationChannelDescriptors', - request_serializer=notification_service.ListNotificationChannelDescriptorsRequest.serialize, - response_deserializer=notification_service.ListNotificationChannelDescriptorsResponse.deserialize, - ) - return self._stubs['list_notification_channel_descriptors'] - - @property - def get_notification_channel_descriptor(self) -> Callable[ - [notification_service.GetNotificationChannelDescriptorRequest], - Awaitable[notification.NotificationChannelDescriptor]]: - r"""Return a callable for the get notification channel - descriptor method over gRPC. - - Gets a single channel descriptor. The descriptor - indicates which fields are expected / permitted for a - notification channel of the given type. - - Returns: - Callable[[~.GetNotificationChannelDescriptorRequest], - Awaitable[~.NotificationChannelDescriptor]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_notification_channel_descriptor' not in self._stubs: - self._stubs['get_notification_channel_descriptor'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/GetNotificationChannelDescriptor', - request_serializer=notification_service.GetNotificationChannelDescriptorRequest.serialize, - response_deserializer=notification.NotificationChannelDescriptor.deserialize, - ) - return self._stubs['get_notification_channel_descriptor'] - - @property - def list_notification_channels(self) -> Callable[ - [notification_service.ListNotificationChannelsRequest], - Awaitable[notification_service.ListNotificationChannelsResponse]]: - r"""Return a callable for the list notification channels method over gRPC. - - Lists the notification channels that have been - created for the project. - - Returns: - Callable[[~.ListNotificationChannelsRequest], - Awaitable[~.ListNotificationChannelsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_notification_channels' not in self._stubs: - self._stubs['list_notification_channels'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/ListNotificationChannels', - request_serializer=notification_service.ListNotificationChannelsRequest.serialize, - response_deserializer=notification_service.ListNotificationChannelsResponse.deserialize, - ) - return self._stubs['list_notification_channels'] - - @property - def get_notification_channel(self) -> Callable[ - [notification_service.GetNotificationChannelRequest], - Awaitable[notification.NotificationChannel]]: - r"""Return a callable for the get notification channel method over gRPC. - - Gets a single notification channel. The channel - includes the relevant configuration details with which - the channel was created. However, the response may - truncate or omit passwords, API keys, or other private - key matter and thus the response may not be 100% - identical to the information that was supplied in the - call to the create method. - - Returns: - Callable[[~.GetNotificationChannelRequest], - Awaitable[~.NotificationChannel]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_notification_channel' not in self._stubs: - self._stubs['get_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/GetNotificationChannel', - request_serializer=notification_service.GetNotificationChannelRequest.serialize, - response_deserializer=notification.NotificationChannel.deserialize, - ) - return self._stubs['get_notification_channel'] - - @property - def create_notification_channel(self) -> Callable[ - [notification_service.CreateNotificationChannelRequest], - Awaitable[notification.NotificationChannel]]: - r"""Return a callable for the create notification channel method over gRPC. - - Creates a new notification channel, representing a - single notification endpoint such as an email address, - SMS number, or PagerDuty service. - - Returns: - Callable[[~.CreateNotificationChannelRequest], - Awaitable[~.NotificationChannel]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_notification_channel' not in self._stubs: - self._stubs['create_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/CreateNotificationChannel', - request_serializer=notification_service.CreateNotificationChannelRequest.serialize, - response_deserializer=notification.NotificationChannel.deserialize, - ) - return self._stubs['create_notification_channel'] - - @property - def update_notification_channel(self) -> Callable[ - [notification_service.UpdateNotificationChannelRequest], - Awaitable[notification.NotificationChannel]]: - r"""Return a callable for the update notification channel method over gRPC. - - Updates a notification channel. Fields not specified - in the field mask remain unchanged. - - Returns: - Callable[[~.UpdateNotificationChannelRequest], - Awaitable[~.NotificationChannel]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_notification_channel' not in self._stubs: - self._stubs['update_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/UpdateNotificationChannel', - request_serializer=notification_service.UpdateNotificationChannelRequest.serialize, - response_deserializer=notification.NotificationChannel.deserialize, - ) - return self._stubs['update_notification_channel'] - - @property - def delete_notification_channel(self) -> Callable[ - [notification_service.DeleteNotificationChannelRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete notification channel method over gRPC. - - Deletes a notification channel. - - Returns: - Callable[[~.DeleteNotificationChannelRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_notification_channel' not in self._stubs: - self._stubs['delete_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/DeleteNotificationChannel', - request_serializer=notification_service.DeleteNotificationChannelRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_notification_channel'] - - @property - def send_notification_channel_verification_code(self) -> Callable[ - [notification_service.SendNotificationChannelVerificationCodeRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the send notification channel - verification code method over gRPC. - - Causes a verification code to be delivered to the channel. The - code can then be supplied in ``VerifyNotificationChannel`` to - verify the channel. - - Returns: - Callable[[~.SendNotificationChannelVerificationCodeRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'send_notification_channel_verification_code' not in self._stubs: - self._stubs['send_notification_channel_verification_code'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/SendNotificationChannelVerificationCode', - request_serializer=notification_service.SendNotificationChannelVerificationCodeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['send_notification_channel_verification_code'] - - @property - def get_notification_channel_verification_code(self) -> Callable[ - [notification_service.GetNotificationChannelVerificationCodeRequest], - Awaitable[notification_service.GetNotificationChannelVerificationCodeResponse]]: - r"""Return a callable for the get notification channel - verification code method over gRPC. - - Requests a verification code for an already verified - channel that can then be used in a call to - VerifyNotificationChannel() on a different channel with - an equivalent identity in the same or in a different - project. 
This makes it possible to copy a channel - between projects without requiring manual reverification - of the channel. If the channel is not in the verified - state, this method will fail (in other words, this may - only be used if the - SendNotificationChannelVerificationCode and - VerifyNotificationChannel paths have already been used - to put the given channel into the verified state). - - There is no guarantee that the verification codes - returned by this method will be of a similar structure - or form as the ones that are delivered to the channel - via SendNotificationChannelVerificationCode; while - VerifyNotificationChannel() will recognize both the - codes delivered via - SendNotificationChannelVerificationCode() and returned - from GetNotificationChannelVerificationCode(), it is - typically the case that the verification codes delivered - via - SendNotificationChannelVerificationCode() will be - shorter and also have a shorter expiration (e.g. codes - such as "G-123456") whereas GetVerificationCode() will - typically return a much longer, websafe base 64 encoded - string that has a longer expiration time. - - Returns: - Callable[[~.GetNotificationChannelVerificationCodeRequest], - Awaitable[~.GetNotificationChannelVerificationCodeResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_notification_channel_verification_code' not in self._stubs: - self._stubs['get_notification_channel_verification_code'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/GetNotificationChannelVerificationCode', - request_serializer=notification_service.GetNotificationChannelVerificationCodeRequest.serialize, - response_deserializer=notification_service.GetNotificationChannelVerificationCodeResponse.deserialize, - ) - return self._stubs['get_notification_channel_verification_code'] - - @property - def verify_notification_channel(self) -> Callable[ - [notification_service.VerifyNotificationChannelRequest], - Awaitable[notification.NotificationChannel]]: - r"""Return a callable for the verify notification channel method over gRPC. - - Verifies a ``NotificationChannel`` by proving receipt of the - code delivered to the channel as a result of calling - ``SendNotificationChannelVerificationCode``. - - Returns: - Callable[[~.VerifyNotificationChannelRequest], - Awaitable[~.NotificationChannel]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'verify_notification_channel' not in self._stubs: - self._stubs['verify_notification_channel'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.NotificationChannelService/VerifyNotificationChannel', - request_serializer=notification_service.VerifyNotificationChannelRequest.serialize, - response_deserializer=notification.NotificationChannel.deserialize, - ) - return self._stubs['verify_notification_channel'] - - -__all__ = ( - 'NotificationChannelServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/__init__.py deleted file mode 100644 index f4b8a9c1..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import QueryServiceClient -from .async_client import QueryServiceAsyncClient - -__all__ = ( - 'QueryServiceClient', - 'QueryServiceAsyncClient', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/async_client.py deleted file mode 100644 index d4f0d382..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/async_client.py +++ /dev/null @@ -1,231 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
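As a rough sketch of how the gRPC AsyncIO transport above is typically consumed, the generated async client can list notification channels with an async pager. This assumes the package exports NotificationChannelServiceAsyncClient and that "my-project" is a placeholder project ID.

    # Minimal async sketch using the AsyncIO transport via the generated client.
    import asyncio

    from google.cloud import monitoring_v3


    async def list_channels() -> None:
        client = monitoring_v3.NotificationChannelServiceAsyncClient()
        # The coroutine returns an async pager; iterating it resolves
        # additional pages transparently.
        pager = await client.list_notification_channels(name="projects/my-project")
        async for channel in pager:
            print(channel.name, channel.type)


    asyncio.run(list_channels())
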
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.services.query_service import pagers -from google.cloud.monitoring_v3.types import metric -from google.cloud.monitoring_v3.types import metric_service -from .transports.base import QueryServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import QueryServiceGrpcAsyncIOTransport -from .client import QueryServiceClient - - -class QueryServiceAsyncClient: - """The QueryService API is used to manage time series data in - Stackdriver Monitoring. Time series data is a collection of data - points that describes the time-varying values of a metric. - """ - - _client: QueryServiceClient - - DEFAULT_ENDPOINT = QueryServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = QueryServiceClient.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(QueryServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(QueryServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(QueryServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(QueryServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(QueryServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(QueryServiceClient.parse_common_organization_path) - common_project_path = staticmethod(QueryServiceClient.common_project_path) - parse_common_project_path = staticmethod(QueryServiceClient.parse_common_project_path) - common_location_path = staticmethod(QueryServiceClient.common_location_path) - parse_common_location_path = staticmethod(QueryServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - QueryServiceAsyncClient: The constructed client. - """ - return QueryServiceClient.from_service_account_info.__func__(QueryServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - QueryServiceAsyncClient: The constructed client. - """ - return QueryServiceClient.from_service_account_file.__func__(QueryServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> QueryServiceTransport: - """Returns the transport used by the client instance. - - Returns: - QueryServiceTransport: The transport used by the client instance. 
- """ - return self._client.transport - - get_transport_class = functools.partial(type(QueryServiceClient).get_transport_class, type(QueryServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, QueryServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the query service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.QueryServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = QueryServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def query_time_series(self, - request: metric_service.QueryTimeSeriesRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.QueryTimeSeriesAsyncPager: - r"""Queries time series using Monitoring Query Language. - This method does not require a Workspace. - - Args: - request (:class:`google.cloud.monitoring_v3.types.QueryTimeSeriesRequest`): - The request object. The `QueryTimeSeries` request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.query_service.pagers.QueryTimeSeriesAsyncPager: - The QueryTimeSeries response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - request = metric_service.QueryTimeSeriesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.query_time_series, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.QueryTimeSeriesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "QueryServiceAsyncClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/client.py deleted file mode 100644 index e36c6441..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/client.py +++ /dev/null @@ -1,414 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.services.query_service import pagers -from google.cloud.monitoring_v3.types import metric -from google.cloud.monitoring_v3.types import metric_service -from .transports.base import QueryServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import QueryServiceGrpcTransport -from .transports.grpc_asyncio import QueryServiceGrpcAsyncIOTransport - - -class QueryServiceClientMeta(type): - """Metaclass for the QueryService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
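A short sketch of calling query_time_series through the async client described above, assuming Application Default Credentials are configured; the project ID and the Monitoring Query Language string are placeholders.

import asyncio

from google.cloud.monitoring_v3.services.query_service import QueryServiceAsyncClient
from google.cloud.monitoring_v3.types import metric_service


async def run_query() -> None:
    client = QueryServiceAsyncClient()

    # Placeholder project and MQL text.
    request = metric_service.QueryTimeSeriesRequest(
        name="projects/my-project",
        query="fetch gce_instance::compute.googleapis.com/instance/cpu/utilization | within 5m",
    )

    # The call returns a QueryTimeSeriesAsyncPager; iterating it yields
    # TimeSeriesData items and resolves additional pages automatically.
    pager = await client.query_time_series(request=request)
    async for series in pager:
        print(series)


asyncio.run(run_query())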
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[QueryServiceTransport]] - _transport_registry["grpc"] = QueryServiceGrpcTransport - _transport_registry["grpc_asyncio"] = QueryServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[QueryServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class QueryServiceClient(metaclass=QueryServiceClientMeta): - """The QueryService API is used to manage time series data in - Stackdriver Monitoring. Time series data is a collection of data - points that describes the time-varying values of a metric. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "monitoring.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - QueryServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - QueryServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> QueryServiceTransport: - """Returns the transport used by the client instance. - - Returns: - QueryServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, QueryServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the query service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, QueryServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). 
However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, QueryServiceTransport): - # transport is a QueryServiceTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def query_time_series(self, - request: metric_service.QueryTimeSeriesRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.QueryTimeSeriesPager: - r"""Queries time series using Monitoring Query Language. - This method does not require a Workspace. - - Args: - request (google.cloud.monitoring_v3.types.QueryTimeSeriesRequest): - The request object. The `QueryTimeSeries` request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.query_service.pagers.QueryTimeSeriesPager: - The QueryTimeSeries response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a metric_service.QueryTimeSeriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metric_service.QueryTimeSeriesRequest): - request = metric_service.QueryTimeSeriesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.query_time_series] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.QueryTimeSeriesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "QueryServiceClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/pagers.py deleted file mode 100644 index f83cde37..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/pagers.py +++ /dev/null @@ -1,141 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
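A sketch of the endpoint and transport selection logic described above, assuming Application Default Credentials; the environment variable and api_endpoint values shown are only one possible combination.

import os

from google.api_core.client_options import ClientOptions
from google.cloud.monitoring_v3.services.query_service import QueryServiceClient

# "never" forces the regular endpoint even if a client certificate is present;
# an explicit api_endpoint in client_options would take precedence anyway.
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"

client = QueryServiceClient(
    client_options=ClientOptions(api_endpoint="monitoring.googleapis.com"),
    transport="grpc",  # label resolved through get_transport_class
)
print(type(client.transport).__name__)  # QueryServiceGrpcTransport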
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.monitoring_v3.types import metric -from google.cloud.monitoring_v3.types import metric_service - - -class QueryTimeSeriesPager: - """A pager for iterating through ``query_time_series`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.QueryTimeSeriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``time_series_data`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``QueryTimeSeries`` requests and continue to iterate - through the ``time_series_data`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.QueryTimeSeriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metric_service.QueryTimeSeriesResponse], - request: metric_service.QueryTimeSeriesRequest, - response: metric_service.QueryTimeSeriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.QueryTimeSeriesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.QueryTimeSeriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metric_service.QueryTimeSeriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[metric_service.QueryTimeSeriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[metric.TimeSeriesData]: - for page in self.pages: - yield from page.time_series_data - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class QueryTimeSeriesAsyncPager: - """A pager for iterating through ``query_time_series`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.QueryTimeSeriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``time_series_data`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``QueryTimeSeries`` requests and continue to iterate - through the ``time_series_data`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.QueryTimeSeriesResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metric_service.QueryTimeSeriesResponse]], - request: metric_service.QueryTimeSeriesRequest, - response: metric_service.QueryTimeSeriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.QueryTimeSeriesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.QueryTimeSeriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metric_service.QueryTimeSeriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[metric_service.QueryTimeSeriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[metric.TimeSeriesData]: - async def async_generator(): - async for page in self.pages: - for response in page.time_series_data: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/__init__.py deleted file mode 100644 index 7b5d1cad..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import QueryServiceTransport -from .grpc import QueryServiceGrpcTransport -from .grpc_asyncio import QueryServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. 
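The pagers above retain only the most recent response, so page-level iteration is the way to inspect per-page attributes; a sketch with the synchronous pager, reusing the same placeholder project and query as in the earlier sketches.

from google.cloud.monitoring_v3.services.query_service import QueryServiceClient
from google.cloud.monitoring_v3.types import metric_service

client = QueryServiceClient()
request = metric_service.QueryTimeSeriesRequest(
    name="projects/my-project",  # placeholder
    query="fetch gce_instance::compute.googleapis.com/instance/cpu/utilization | within 5m",
)

pager = client.query_time_series(request=request)

# Each page is a full QueryTimeSeriesResponse.
for page in pager.pages:
    for series in page.time_series_data:
        print(series)

# Attribute access falls through to the most recent response.
print(pager.next_page_token)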
-_transport_registry = OrderedDict() # type: Dict[str, Type[QueryServiceTransport]] -_transport_registry['grpc'] = QueryServiceGrpcTransport -_transport_registry['grpc_asyncio'] = QueryServiceGrpcAsyncIOTransport - -__all__ = ( - 'QueryServiceTransport', - 'QueryServiceGrpcTransport', - 'QueryServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/base.py deleted file mode 100644 index 9ddc63af..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/base.py +++ /dev/null @@ -1,170 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.types import metric_service - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-monitoring', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class QueryServiceTransport(abc.ABC): - """Abstract transport class for QueryService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ) - - DEFAULT_HOST: str = 'monitoring.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
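A sketch of the google-auth version gate that _get_scopes_kwargs implements, written against the public google.auth.default entry point rather than the private helper; it assumes packaging and google-auth are installed and that Application Default Credentials are configured.

import packaging.version

import google.auth

AUTH_SCOPES = (
    "https://www.googleapis.com/auth/cloud-platform",
    "https://www.googleapis.com/auth/monitoring",
    "https://www.googleapis.com/auth/monitoring.read",
)

# google-auth >= 1.25.0 understands default_scopes; older releases only
# accept scopes, so the library defaults are passed there instead.
auth_version = getattr(google.auth, "__version__", "0")
if packaging.version.parse(auth_version) >= packaging.version.parse("1.25.0"):
    scopes_kwargs = {"scopes": None, "default_scopes": AUTH_SCOPES}
else:
    scopes_kwargs = {"scopes": AUTH_SCOPES}

credentials, project_id = google.auth.default(**scopes_kwargs)
print(project_id)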
- self._wrapped_methods = { - self.query_time_series: gapic_v1.method.wrap_method( - self.query_time_series, - default_timeout=None, - client_info=client_info, - ), - } - - @property - def query_time_series(self) -> Callable[ - [metric_service.QueryTimeSeriesRequest], - Union[ - metric_service.QueryTimeSeriesResponse, - Awaitable[metric_service.QueryTimeSeriesResponse] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'QueryServiceTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc.py deleted file mode 100644 index 28ea91c0..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc.py +++ /dev/null @@ -1,255 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.monitoring_v3.types import metric_service -from .base import QueryServiceTransport, DEFAULT_CLIENT_INFO - - -class QueryServiceGrpcTransport(QueryServiceTransport): - """gRPC backend transport for QueryService. - - The QueryService API is used to manage time series data in - Stackdriver Monitoring. Time series data is a collection of data - points that describes the time-varying values of a metric. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def query_time_series(self) -> Callable[ - [metric_service.QueryTimeSeriesRequest], - metric_service.QueryTimeSeriesResponse]: - r"""Return a callable for the query time series method over gRPC. - - Queries time series using Monitoring Query Language. - This method does not require a Workspace. - - Returns: - Callable[[~.QueryTimeSeriesRequest], - ~.QueryTimeSeriesResponse]: - A function that, when called, will call the underlying RPC - on the server. 
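A sketch of supplying the transport with a pre-built channel, which the constructor above accepts in place of credential arguments; it assumes Application Default Credentials so that create_channel can resolve them.

from google.cloud.monitoring_v3.services.query_service import QueryServiceClient
from google.cloud.monitoring_v3.services.query_service.transports import (
    QueryServiceGrpcTransport,
)

# create_channel applies the default scopes and host shown above.
channel = QueryServiceGrpcTransport.create_channel("monitoring.googleapis.com")

# With an explicit channel, credential arguments are ignored and the channel
# is used as-is.
transport = QueryServiceGrpcTransport(channel=channel)
client = QueryServiceClient(transport=transport)
print(type(client.transport).__name__)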
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'query_time_series' not in self._stubs: - self._stubs['query_time_series'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.QueryService/QueryTimeSeries', - request_serializer=metric_service.QueryTimeSeriesRequest.serialize, - response_deserializer=metric_service.QueryTimeSeriesResponse.deserialize, - ) - return self._stubs['query_time_series'] - - -__all__ = ( - 'QueryServiceGrpcTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py deleted file mode 100644 index 4d1c336e..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,259 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.monitoring_v3.types import metric_service -from .base import QueryServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import QueryServiceGrpcTransport - - -class QueryServiceGrpcAsyncIOTransport(QueryServiceTransport): - """gRPC AsyncIO backend transport for QueryService. - - The QueryService API is used to manage time series data in - Stackdriver Monitoring. Time series data is a collection of data - points that describes the time-varying values of a metric. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def query_time_series(self) -> Callable[ - [metric_service.QueryTimeSeriesRequest], - Awaitable[metric_service.QueryTimeSeriesResponse]]: - r"""Return a callable for the query time series method over gRPC. - - Queries time series using Monitoring Query Language. - This method does not require a Workspace. - - Returns: - Callable[[~.QueryTimeSeriesRequest], - Awaitable[~.QueryTimeSeriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'query_time_series' not in self._stubs: - self._stubs['query_time_series'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.QueryService/QueryTimeSeries', - request_serializer=metric_service.QueryTimeSeriesRequest.serialize, - response_deserializer=metric_service.QueryTimeSeriesResponse.deserialize, - ) - return self._stubs['query_time_series'] - - -__all__ = ( - 'QueryServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/__init__.py deleted file mode 100644 index 68f0d87a..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import ServiceMonitoringServiceClient -from .async_client import ServiceMonitoringServiceAsyncClient - -__all__ = ( - 'ServiceMonitoringServiceClient', - 'ServiceMonitoringServiceAsyncClient', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py deleted file mode 100644 index f255e172..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py +++ /dev/null @@ -1,1061 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.services.service_monitoring_service import pagers -from google.cloud.monitoring_v3.types import service -from google.cloud.monitoring_v3.types import service as gm_service -from google.cloud.monitoring_v3.types import service_service -from google.protobuf import duration_pb2 # type: ignore -from google.type import calendar_period_pb2 # type: ignore -from .transports.base import ServiceMonitoringServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ServiceMonitoringServiceGrpcAsyncIOTransport -from .client import ServiceMonitoringServiceClient - - -class ServiceMonitoringServiceAsyncClient: - """The Cloud Monitoring Service-Oriented Monitoring API has endpoints - for managing and querying aspects of a workspace's services. These - include the ``Service``'s monitored resources, its Service-Level - Objectives, and a taxonomy of categorized Health Metrics. - """ - - _client: ServiceMonitoringServiceClient - - DEFAULT_ENDPOINT = ServiceMonitoringServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = ServiceMonitoringServiceClient.DEFAULT_MTLS_ENDPOINT - - service_path = staticmethod(ServiceMonitoringServiceClient.service_path) - parse_service_path = staticmethod(ServiceMonitoringServiceClient.parse_service_path) - service_level_objective_path = staticmethod(ServiceMonitoringServiceClient.service_level_objective_path) - parse_service_level_objective_path = staticmethod(ServiceMonitoringServiceClient.parse_service_level_objective_path) - common_billing_account_path = staticmethod(ServiceMonitoringServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(ServiceMonitoringServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(ServiceMonitoringServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(ServiceMonitoringServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(ServiceMonitoringServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(ServiceMonitoringServiceClient.parse_common_organization_path) - common_project_path = staticmethod(ServiceMonitoringServiceClient.common_project_path) - parse_common_project_path = staticmethod(ServiceMonitoringServiceClient.parse_common_project_path) - common_location_path = staticmethod(ServiceMonitoringServiceClient.common_location_path) - parse_common_location_path = staticmethod(ServiceMonitoringServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ServiceMonitoringServiceAsyncClient: The constructed client. 
- """ - return ServiceMonitoringServiceClient.from_service_account_info.__func__(ServiceMonitoringServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ServiceMonitoringServiceAsyncClient: The constructed client. - """ - return ServiceMonitoringServiceClient.from_service_account_file.__func__(ServiceMonitoringServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ServiceMonitoringServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ServiceMonitoringServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(ServiceMonitoringServiceClient).get_transport_class, type(ServiceMonitoringServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, ServiceMonitoringServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the service monitoring service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.ServiceMonitoringServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = ServiceMonitoringServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_service(self, - request: service_service.CreateServiceRequest = None, - *, - parent: str = None, - service: gm_service.Service = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gm_service.Service: - r"""Create a ``Service``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.CreateServiceRequest`): - The request object. The `CreateService` request. - parent (:class:`str`): - Required. Resource - `name `__ - of the parent workspace. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - service (:class:`google.cloud.monitoring_v3.types.Service`): - Required. The ``Service`` to create. - This corresponds to the ``service`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Service: - A Service is a discrete, autonomous, and network-accessible unit, designed - to solve an individual concern - ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). - In Cloud Monitoring, a Service acts as the root - resource under which operational aspects of the - service are accessible. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, service]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.CreateServiceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if service is not None: - request.service = service - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_service, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_service(self, - request: service_service.GetServiceRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.Service: - r"""Get the named ``Service``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetServiceRequest`): - The request object. The `GetService` request. - name (:class:`str`): - Required. Resource name of the ``Service``. 
The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Service: - A Service is a discrete, autonomous, and network-accessible unit, designed - to solve an individual concern - ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). - In Cloud Monitoring, a Service acts as the root - resource under which operational aspects of the - service are accessible. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.GetServiceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_service, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_services(self, - request: service_service.ListServicesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListServicesAsyncPager: - r"""List ``Service``\ s for this workspace. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListServicesRequest`): - The request object. The `ListServices` request. - parent (:class:`str`): - Required. Resource name of the parent containing the - listed services, either a - `project `__ - or a Monitoring Workspace. The formats are: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - workspaces/[HOST_PROJECT_ID_OR_NUMBER] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.service_monitoring_service.pagers.ListServicesAsyncPager: - The ListServices response. - - Iterating over this object will yield results and - resolve additional pages automatically. 
- - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.ListServicesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_services, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListServicesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_service(self, - request: service_service.UpdateServiceRequest = None, - *, - service: gm_service.Service = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gm_service.Service: - r"""Update this ``Service``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.UpdateServiceRequest`): - The request object. The `UpdateService` request. - service (:class:`google.cloud.monitoring_v3.types.Service`): - Required. The ``Service`` to draw updates from. The - given ``name`` specifies the resource to update. - - This corresponds to the ``service`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Service: - A Service is a discrete, autonomous, and network-accessible unit, designed - to solve an individual concern - ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). - In Cloud Monitoring, a Service acts as the root - resource under which operational aspects of the - service are accessible. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([service]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.UpdateServiceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if service is not None: - request.service = service - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_service, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("service.name", request.service.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_service(self, - request: service_service.DeleteServiceRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Soft delete this ``Service``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.DeleteServiceRequest`): - The request object. The `DeleteService` request. - name (:class:`str`): - Required. Resource name of the ``Service`` to delete. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.DeleteServiceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_service, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_service_level_objective(self, - request: service_service.CreateServiceLevelObjectiveRequest = None, - *, - parent: str = None, - service_level_objective: service.ServiceLevelObjective = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.ServiceLevelObjective: - r"""Create a ``ServiceLevelObjective`` for the given ``Service``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.CreateServiceLevelObjectiveRequest`): - The request object. 
The `CreateServiceLevelObjective` - request. - parent (:class:`str`): - Required. Resource name of the parent ``Service``. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - service_level_objective (:class:`google.cloud.monitoring_v3.types.ServiceLevelObjective`): - Required. The ``ServiceLevelObjective`` to create. The - provided ``name`` will be respected if no - ``ServiceLevelObjective`` exists with this name. - - This corresponds to the ``service_level_objective`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.ServiceLevelObjective: - A Service-Level Objective (SLO) - describes a level of desired good - service. It consists of a service-level - indicator (SLI), a performance goal, and - a period over which the objective is to - be evaluated against that goal. The SLO - can use SLIs defined in a number of - different manners. Typical SLOs might - include "99% of requests in each rolling - week have latency below 200 - milliseconds" or "99.5% of requests in - each calendar month return - successfully." - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, service_level_objective]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.CreateServiceLevelObjectiveRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if service_level_objective is not None: - request.service_level_objective = service_level_objective - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_service_level_objective, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_service_level_objective(self, - request: service_service.GetServiceLevelObjectiveRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.ServiceLevelObjective: - r"""Get a ``ServiceLevelObjective`` by name. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetServiceLevelObjectiveRequest`): - The request object. The `GetServiceLevelObjective` - request. - name (:class:`str`): - Required. Resource name of the ``ServiceLevelObjective`` - to get. 
The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.ServiceLevelObjective: - A Service-Level Objective (SLO) - describes a level of desired good - service. It consists of a service-level - indicator (SLI), a performance goal, and - a period over which the objective is to - be evaluated against that goal. The SLO - can use SLIs defined in a number of - different manners. Typical SLOs might - include "99% of requests in each rolling - week have latency below 200 - milliseconds" or "99.5% of requests in - each calendar month return - successfully." - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.GetServiceLevelObjectiveRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_service_level_objective, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_service_level_objectives(self, - request: service_service.ListServiceLevelObjectivesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListServiceLevelObjectivesAsyncPager: - r"""List the ``ServiceLevelObjective``\ s for the given ``Service``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest`): - The request object. The `ListServiceLevelObjectives` - request. - parent (:class:`str`): - Required. Resource name of the parent containing the - listed SLOs, either a project or a Monitoring Workspace. - The formats are: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - workspaces/[HOST_PROJECT_ID_OR_NUMBER]/services/- - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.service_monitoring_service.pagers.ListServiceLevelObjectivesAsyncPager: - The ListServiceLevelObjectives response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.ListServiceLevelObjectivesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_service_level_objectives, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListServiceLevelObjectivesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_service_level_objective(self, - request: service_service.UpdateServiceLevelObjectiveRequest = None, - *, - service_level_objective: service.ServiceLevelObjective = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.ServiceLevelObjective: - r"""Update the given ``ServiceLevelObjective``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.UpdateServiceLevelObjectiveRequest`): - The request object. The `UpdateServiceLevelObjective` - request. - service_level_objective (:class:`google.cloud.monitoring_v3.types.ServiceLevelObjective`): - Required. The ``ServiceLevelObjective`` to draw updates - from. The given ``name`` specifies the resource to - update. - - This corresponds to the ``service_level_objective`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.ServiceLevelObjective: - A Service-Level Objective (SLO) - describes a level of desired good - service. It consists of a service-level - indicator (SLI), a performance goal, and - a period over which the objective is to - be evaluated against that goal. 
The SLO - can use SLIs defined in a number of - different manners. Typical SLOs might - include "99% of requests in each rolling - week have latency below 200 - milliseconds" or "99.5% of requests in - each calendar month return - successfully." - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([service_level_objective]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.UpdateServiceLevelObjectiveRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if service_level_objective is not None: - request.service_level_objective = service_level_objective - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_service_level_objective, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("service_level_objective.name", request.service_level_objective.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_service_level_objective(self, - request: service_service.DeleteServiceLevelObjectiveRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete the given ``ServiceLevelObjective``. - - Args: - request (:class:`google.cloud.monitoring_v3.types.DeleteServiceLevelObjectiveRequest`): - The request object. The `DeleteServiceLevelObjective` - request. - name (:class:`str`): - Required. Resource name of the ``ServiceLevelObjective`` - to delete. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = service_service.DeleteServiceLevelObjectiveRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_service_level_objective, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "ServiceMonitoringServiceAsyncClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/client.py deleted file mode 100644 index 59b72d13..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/client.py +++ /dev/null @@ -1,1225 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.services.service_monitoring_service import pagers -from google.cloud.monitoring_v3.types import service -from google.cloud.monitoring_v3.types import service as gm_service -from google.cloud.monitoring_v3.types import service_service -from google.protobuf import duration_pb2 # type: ignore -from google.type import calendar_period_pb2 # type: ignore -from .transports.base import ServiceMonitoringServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import ServiceMonitoringServiceGrpcTransport -from .transports.grpc_asyncio import ServiceMonitoringServiceGrpcAsyncIOTransport - - -class ServiceMonitoringServiceClientMeta(type): - """Metaclass for the ServiceMonitoringService client. 
- - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[ServiceMonitoringServiceTransport]] - _transport_registry["grpc"] = ServiceMonitoringServiceGrpcTransport - _transport_registry["grpc_asyncio"] = ServiceMonitoringServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[ServiceMonitoringServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class ServiceMonitoringServiceClient(metaclass=ServiceMonitoringServiceClientMeta): - """The Cloud Monitoring Service-Oriented Monitoring API has endpoints - for managing and querying aspects of a workspace's services. These - include the ``Service``'s monitored resources, its Service-Level - Objectives, and a taxonomy of categorized Health Metrics. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "monitoring.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ServiceMonitoringServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ServiceMonitoringServiceClient: The constructed client.
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ServiceMonitoringServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ServiceMonitoringServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def service_path(project: str,service: str,) -> str: - """Returns a fully-qualified service string.""" - return "projects/{project}/services/{service}".format(project=project, service=service, ) - - @staticmethod - def parse_service_path(path: str) -> Dict[str,str]: - """Parses a service path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/services/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def service_level_objective_path(project: str,service: str,service_level_objective: str,) -> str: - """Returns a fully-qualified service_level_objective string.""" - return "projects/{project}/services/{service}/serviceLevelObjectives/{service_level_objective}".format(project=project, service=service, service_level_objective=service_level_objective, ) - - @staticmethod - def parse_service_level_objective_path(path: str) -> Dict[str,str]: - """Parses a service_level_objective path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/services/(?P.+?)/serviceLevelObjectives/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) 
-> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ServiceMonitoringServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the service monitoring service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ServiceMonitoringServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use.
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, ServiceMonitoringServiceTransport): - # transport is a ServiceMonitoringServiceTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def create_service(self, - request: service_service.CreateServiceRequest = None, - *, - parent: str = None, - service: gm_service.Service = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gm_service.Service: - r"""Create a ``Service``. - - Args: - request (google.cloud.monitoring_v3.types.CreateServiceRequest): - The request object. The `CreateService` request. - parent (str): - Required. Resource - `name `__ - of the parent workspace. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - service (google.cloud.monitoring_v3.types.Service): - Required. The ``Service`` to create. - This corresponds to the ``service`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Service: - A Service is a discrete, autonomous, and network-accessible unit, designed - to solve an individual concern - ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). - In Cloud Monitoring, a Service acts as the root - resource under which operational aspects of the - service are accessible. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, service]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.CreateServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, service_service.CreateServiceRequest): - request = service_service.CreateServiceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if service is not None: - request.service = service - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_service(self, - request: service_service.GetServiceRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.Service: - r"""Get the named ``Service``. - - Args: - request (google.cloud.monitoring_v3.types.GetServiceRequest): - The request object. The `GetService` request. - name (str): - Required. Resource name of the ``Service``. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Service: - A Service is a discrete, autonomous, and network-accessible unit, designed - to solve an individual concern - ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). - In Cloud Monitoring, a Service acts as the root - resource under which operational aspects of the - service are accessible. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.GetServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, service_service.GetServiceRequest): - request = service_service.GetServiceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_services(self, - request: service_service.ListServicesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListServicesPager: - r"""List ``Service``\ s for this workspace. - - Args: - request (google.cloud.monitoring_v3.types.ListServicesRequest): - The request object. The `ListServices` request. - parent (str): - Required. Resource name of the parent containing the - listed services, either a - `project `__ - or a Monitoring Workspace. The formats are: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - workspaces/[HOST_PROJECT_ID_OR_NUMBER] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.service_monitoring_service.pagers.ListServicesPager: - The ListServices response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.ListServicesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, service_service.ListServicesRequest): - request = service_service.ListServicesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_services] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListServicesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_service(self, - request: service_service.UpdateServiceRequest = None, - *, - service: gm_service.Service = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gm_service.Service: - r"""Update this ``Service``. - - Args: - request (google.cloud.monitoring_v3.types.UpdateServiceRequest): - The request object. The `UpdateService` request. - service (google.cloud.monitoring_v3.types.Service): - Required. The ``Service`` to draw updates from. The - given ``name`` specifies the resource to update. - - This corresponds to the ``service`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.Service: - A Service is a discrete, autonomous, and network-accessible unit, designed - to solve an individual concern - ([Wikipedia](https://en.wikipedia.org/wiki/Service-orientation)). - In Cloud Monitoring, a Service acts as the root - resource under which operational aspects of the - service are accessible. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([service]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.UpdateServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, service_service.UpdateServiceRequest): - request = service_service.UpdateServiceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if service is not None: - request.service = service - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("service.name", request.service.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_service(self, - request: service_service.DeleteServiceRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Soft delete this ``Service``. - - Args: - request (google.cloud.monitoring_v3.types.DeleteServiceRequest): - The request object. The `DeleteService` request. - name (str): - Required. Resource name of the ``Service`` to delete. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.DeleteServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, service_service.DeleteServiceRequest): - request = service_service.DeleteServiceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_service_level_objective(self, - request: service_service.CreateServiceLevelObjectiveRequest = None, - *, - parent: str = None, - service_level_objective: service.ServiceLevelObjective = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.ServiceLevelObjective: - r"""Create a ``ServiceLevelObjective`` for the given ``Service``. - - Args: - request (google.cloud.monitoring_v3.types.CreateServiceLevelObjectiveRequest): - The request object. The `CreateServiceLevelObjective` - request. - parent (str): - Required. Resource name of the parent ``Service``. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - service_level_objective (google.cloud.monitoring_v3.types.ServiceLevelObjective): - Required. The ``ServiceLevelObjective`` to create. The - provided ``name`` will be respected if no - ``ServiceLevelObjective`` exists with this name. - - This corresponds to the ``service_level_objective`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.ServiceLevelObjective: - A Service-Level Objective (SLO) - describes a level of desired good - service. It consists of a service-level - indicator (SLI), a performance goal, and - a period over which the objective is to - be evaluated against that goal. The SLO - can use SLIs defined in a number of - different manners. 
Typical SLOs might - include "99% of requests in each rolling - week have latency below 200 - milliseconds" or "99.5% of requests in - each calendar month return - successfully." - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, service_level_objective]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.CreateServiceLevelObjectiveRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, service_service.CreateServiceLevelObjectiveRequest): - request = service_service.CreateServiceLevelObjectiveRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if service_level_objective is not None: - request.service_level_objective = service_level_objective - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_service_level_objective] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_service_level_objective(self, - request: service_service.GetServiceLevelObjectiveRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.ServiceLevelObjective: - r"""Get a ``ServiceLevelObjective`` by name. - - Args: - request (google.cloud.monitoring_v3.types.GetServiceLevelObjectiveRequest): - The request object. The `GetServiceLevelObjective` - request. - name (str): - Required. Resource name of the ``ServiceLevelObjective`` - to get. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.ServiceLevelObjective: - A Service-Level Objective (SLO) - describes a level of desired good - service. It consists of a service-level - indicator (SLI), a performance goal, and - a period over which the objective is to - be evaluated against that goal. The SLO - can use SLIs defined in a number of - different manners. Typical SLOs might - include "99% of requests in each rolling - week have latency below 200 - milliseconds" or "99.5% of requests in - each calendar month return - successfully." - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.GetServiceLevelObjectiveRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, service_service.GetServiceLevelObjectiveRequest): - request = service_service.GetServiceLevelObjectiveRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_service_level_objective] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_service_level_objectives(self, - request: service_service.ListServiceLevelObjectivesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListServiceLevelObjectivesPager: - r"""List the ``ServiceLevelObjective``\ s for the given ``Service``. - - Args: - request (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest): - The request object. The `ListServiceLevelObjectives` - request. - parent (str): - Required. Resource name of the parent containing the - listed SLOs, either a project or a Monitoring Workspace. - The formats are: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - workspaces/[HOST_PROJECT_ID_OR_NUMBER]/services/- - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.service_monitoring_service.pagers.ListServiceLevelObjectivesPager: - The ListServiceLevelObjectives response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.ListServiceLevelObjectivesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, service_service.ListServiceLevelObjectivesRequest): - request = service_service.ListServiceLevelObjectivesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_service_level_objectives] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListServiceLevelObjectivesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_service_level_objective(self, - request: service_service.UpdateServiceLevelObjectiveRequest = None, - *, - service_level_objective: service.ServiceLevelObjective = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.ServiceLevelObjective: - r"""Update the given ``ServiceLevelObjective``. - - Args: - request (google.cloud.monitoring_v3.types.UpdateServiceLevelObjectiveRequest): - The request object. The `UpdateServiceLevelObjective` - request. - service_level_objective (google.cloud.monitoring_v3.types.ServiceLevelObjective): - Required. The ``ServiceLevelObjective`` to draw updates - from. The given ``name`` specifies the resource to - update. - - This corresponds to the ``service_level_objective`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.ServiceLevelObjective: - A Service-Level Objective (SLO) - describes a level of desired good - service. It consists of a service-level - indicator (SLI), a performance goal, and - a period over which the objective is to - be evaluated against that goal. The SLO - can use SLIs defined in a number of - different manners. Typical SLOs might - include "99% of requests in each rolling - week have latency below 200 - milliseconds" or "99.5% of requests in - each calendar month return - successfully." - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([service_level_objective]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.UpdateServiceLevelObjectiveRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, service_service.UpdateServiceLevelObjectiveRequest): - request = service_service.UpdateServiceLevelObjectiveRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if service_level_objective is not None: - request.service_level_objective = service_level_objective - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_service_level_objective] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("service_level_objective.name", request.service_level_objective.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_service_level_objective(self, - request: service_service.DeleteServiceLevelObjectiveRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete the given ``ServiceLevelObjective``. - - Args: - request (google.cloud.monitoring_v3.types.DeleteServiceLevelObjectiveRequest): - The request object. The `DeleteServiceLevelObjective` - request. - name (str): - Required. Resource name of the ``ServiceLevelObjective`` - to delete. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a service_service.DeleteServiceLevelObjectiveRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, service_service.DeleteServiceLevelObjectiveRequest): - request = service_service.DeleteServiceLevelObjectiveRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_service_level_objective] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "ServiceMonitoringServiceClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/pagers.py deleted file mode 100644 index e9c457ff..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/pagers.py +++ /dev/null @@ -1,263 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.monitoring_v3.types import service -from google.cloud.monitoring_v3.types import service_service - - -class ListServicesPager: - """A pager for iterating through ``list_services`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListServicesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``services`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListServices`` requests and continue to iterate - through the ``services`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListServicesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service_service.ListServicesResponse], - request: service_service.ListServicesRequest, - response: service_service.ListServicesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListServicesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListServicesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = service_service.ListServicesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[service_service.ListServicesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[service.Service]: - for page in self.pages: - yield from page.services - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListServicesAsyncPager: - """A pager for iterating through ``list_services`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListServicesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``services`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListServices`` requests and continue to iterate - through the ``services`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListServicesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service_service.ListServicesResponse]], - request: service_service.ListServicesRequest, - response: service_service.ListServicesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListServicesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListServicesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service_service.ListServicesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[service_service.ListServicesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[service.Service]: - async def async_generator(): - async for page in self.pages: - for response in page.services: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListServiceLevelObjectivesPager: - """A pager for iterating through ``list_service_level_objectives`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``service_level_objectives`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListServiceLevelObjectives`` requests and continue to iterate - through the ``service_level_objectives`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service_service.ListServiceLevelObjectivesResponse], - request: service_service.ListServiceLevelObjectivesRequest, - response: service_service.ListServiceLevelObjectivesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service_service.ListServiceLevelObjectivesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[service_service.ListServiceLevelObjectivesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[service.ServiceLevelObjective]: - for page in self.pages: - yield from page.service_level_objectives - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListServiceLevelObjectivesAsyncPager: - """A pager for iterating through ``list_service_level_objectives`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``service_level_objectives`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListServiceLevelObjectives`` requests and continue to iterate - through the ``service_level_objectives`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service_service.ListServiceLevelObjectivesResponse]], - request: service_service.ListServiceLevelObjectivesRequest, - response: service_service.ListServiceLevelObjectivesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListServiceLevelObjectivesResponse): - The initial response object. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service_service.ListServiceLevelObjectivesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[service_service.ListServiceLevelObjectivesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[service.ServiceLevelObjective]: - async def async_generator(): - async for page in self.pages: - for response in page.service_level_objectives: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/__init__.py deleted file mode 100644 index c51e1a3a..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ServiceMonitoringServiceTransport -from .grpc import ServiceMonitoringServiceGrpcTransport -from .grpc_asyncio import ServiceMonitoringServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[ServiceMonitoringServiceTransport]] -_transport_registry['grpc'] = ServiceMonitoringServiceGrpcTransport -_transport_registry['grpc_asyncio'] = ServiceMonitoringServiceGrpcAsyncIOTransport - -__all__ = ( - 'ServiceMonitoringServiceTransport', - 'ServiceMonitoringServiceGrpcTransport', - 'ServiceMonitoringServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py deleted file mode 100644 index 7811feed..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py +++ /dev/null @@ -1,335 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.types import service -from google.cloud.monitoring_v3.types import service as gm_service -from google.cloud.monitoring_v3.types import service_service -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-monitoring', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class ServiceMonitoringServiceTransport(abc.ABC): - """Abstract transport class for ServiceMonitoringService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ) - - DEFAULT_HOST: str = 'monitoring.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required version of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods.
- self._wrapped_methods = { - self.create_service: gapic_v1.method.wrap_method( - self.create_service, - default_timeout=30.0, - client_info=client_info, - ), - self.get_service: gapic_v1.method.wrap_method( - self.get_service, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_services: gapic_v1.method.wrap_method( - self.list_services, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.update_service: gapic_v1.method.wrap_method( - self.update_service, - default_timeout=30.0, - client_info=client_info, - ), - self.delete_service: gapic_v1.method.wrap_method( - self.delete_service, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.create_service_level_objective: gapic_v1.method.wrap_method( - self.create_service_level_objective, - default_timeout=30.0, - client_info=client_info, - ), - self.get_service_level_objective: gapic_v1.method.wrap_method( - self.get_service_level_objective, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_service_level_objectives: gapic_v1.method.wrap_method( - self.list_service_level_objectives, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.update_service_level_objective: gapic_v1.method.wrap_method( - self.update_service_level_objective, - default_timeout=30.0, - client_info=client_info, - ), - self.delete_service_level_objective: gapic_v1.method.wrap_method( - self.delete_service_level_objective, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - } - - @property - def create_service(self) -> Callable[ - [service_service.CreateServiceRequest], - Union[ - gm_service.Service, - Awaitable[gm_service.Service] - ]]: - raise NotImplementedError() - - @property - def get_service(self) -> Callable[ - [service_service.GetServiceRequest], - Union[ - service.Service, - Awaitable[service.Service] - ]]: - raise NotImplementedError() - - @property - def list_services(self) -> Callable[ - [service_service.ListServicesRequest], - Union[ - service_service.ListServicesResponse, - Awaitable[service_service.ListServicesResponse] - ]]: - raise NotImplementedError() - - @property - def update_service(self) -> Callable[ - [service_service.UpdateServiceRequest], - Union[ - gm_service.Service, - Awaitable[gm_service.Service] - ]]: - raise NotImplementedError() - - @property - def delete_service(self) -> Callable[ - [service_service.DeleteServiceRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def 
create_service_level_objective(self) -> Callable[ - [service_service.CreateServiceLevelObjectiveRequest], - Union[ - service.ServiceLevelObjective, - Awaitable[service.ServiceLevelObjective] - ]]: - raise NotImplementedError() - - @property - def get_service_level_objective(self) -> Callable[ - [service_service.GetServiceLevelObjectiveRequest], - Union[ - service.ServiceLevelObjective, - Awaitable[service.ServiceLevelObjective] - ]]: - raise NotImplementedError() - - @property - def list_service_level_objectives(self) -> Callable[ - [service_service.ListServiceLevelObjectivesRequest], - Union[ - service_service.ListServiceLevelObjectivesResponse, - Awaitable[service_service.ListServiceLevelObjectivesResponse] - ]]: - raise NotImplementedError() - - @property - def update_service_level_objective(self) -> Callable[ - [service_service.UpdateServiceLevelObjectiveRequest], - Union[ - service.ServiceLevelObjective, - Awaitable[service.ServiceLevelObjective] - ]]: - raise NotImplementedError() - - @property - def delete_service_level_objective(self) -> Callable[ - [service_service.DeleteServiceLevelObjectiveRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'ServiceMonitoringServiceTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py deleted file mode 100644 index 2a98e523..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py +++ /dev/null @@ -1,492 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.monitoring_v3.types import service -from google.cloud.monitoring_v3.types import service as gm_service -from google.cloud.monitoring_v3.types import service_service -from google.protobuf import empty_pb2 # type: ignore -from .base import ServiceMonitoringServiceTransport, DEFAULT_CLIENT_INFO - - -class ServiceMonitoringServiceGrpcTransport(ServiceMonitoringServiceTransport): - """gRPC backend transport for ServiceMonitoringService. - - The Cloud Monitoring Service-Oriented Monitoring API has endpoints - for managing and querying aspects of a workspace's services. These - include the ``Service``'s monitored resources, its Service-Level - Objectives, and a taxonomy of categorized Health Metrics. 
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed.
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_service(self) -> Callable[ - [service_service.CreateServiceRequest], - gm_service.Service]: - r"""Return a callable for the create service method over gRPC. - - Create a ``Service``. - - Returns: - Callable[[~.CreateServiceRequest], - ~.Service]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_service' not in self._stubs: - self._stubs['create_service'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/CreateService', - request_serializer=service_service.CreateServiceRequest.serialize, - response_deserializer=gm_service.Service.deserialize, - ) - return self._stubs['create_service'] - - @property - def get_service(self) -> Callable[ - [service_service.GetServiceRequest], - service.Service]: - r"""Return a callable for the get service method over gRPC. - - Get the named ``Service``. - - Returns: - Callable[[~.GetServiceRequest], - ~.Service]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_service' not in self._stubs: - self._stubs['get_service'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/GetService', - request_serializer=service_service.GetServiceRequest.serialize, - response_deserializer=service.Service.deserialize, - ) - return self._stubs['get_service'] - - @property - def list_services(self) -> Callable[ - [service_service.ListServicesRequest], - service_service.ListServicesResponse]: - r"""Return a callable for the list services method over gRPC. - - List ``Service``\ s for this workspace. - - Returns: - Callable[[~.ListServicesRequest], - ~.ListServicesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_services' not in self._stubs: - self._stubs['list_services'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/ListServices', - request_serializer=service_service.ListServicesRequest.serialize, - response_deserializer=service_service.ListServicesResponse.deserialize, - ) - return self._stubs['list_services'] - - @property - def update_service(self) -> Callable[ - [service_service.UpdateServiceRequest], - gm_service.Service]: - r"""Return a callable for the update service method over gRPC. - - Update this ``Service``. - - Returns: - Callable[[~.UpdateServiceRequest], - ~.Service]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_service' not in self._stubs: - self._stubs['update_service'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/UpdateService', - request_serializer=service_service.UpdateServiceRequest.serialize, - response_deserializer=gm_service.Service.deserialize, - ) - return self._stubs['update_service'] - - @property - def delete_service(self) -> Callable[ - [service_service.DeleteServiceRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete service method over gRPC. - - Soft delete this ``Service``. - - Returns: - Callable[[~.DeleteServiceRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_service' not in self._stubs: - self._stubs['delete_service'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/DeleteService', - request_serializer=service_service.DeleteServiceRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_service'] - - @property - def create_service_level_objective(self) -> Callable[ - [service_service.CreateServiceLevelObjectiveRequest], - service.ServiceLevelObjective]: - r"""Return a callable for the create service level objective method over gRPC. - - Create a ``ServiceLevelObjective`` for the given ``Service``. - - Returns: - Callable[[~.CreateServiceLevelObjectiveRequest], - ~.ServiceLevelObjective]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_service_level_objective' not in self._stubs: - self._stubs['create_service_level_objective'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/CreateServiceLevelObjective', - request_serializer=service_service.CreateServiceLevelObjectiveRequest.serialize, - response_deserializer=service.ServiceLevelObjective.deserialize, - ) - return self._stubs['create_service_level_objective'] - - @property - def get_service_level_objective(self) -> Callable[ - [service_service.GetServiceLevelObjectiveRequest], - service.ServiceLevelObjective]: - r"""Return a callable for the get service level objective method over gRPC. - - Get a ``ServiceLevelObjective`` by name. - - Returns: - Callable[[~.GetServiceLevelObjectiveRequest], - ~.ServiceLevelObjective]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_service_level_objective' not in self._stubs: - self._stubs['get_service_level_objective'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/GetServiceLevelObjective', - request_serializer=service_service.GetServiceLevelObjectiveRequest.serialize, - response_deserializer=service.ServiceLevelObjective.deserialize, - ) - return self._stubs['get_service_level_objective'] - - @property - def list_service_level_objectives(self) -> Callable[ - [service_service.ListServiceLevelObjectivesRequest], - service_service.ListServiceLevelObjectivesResponse]: - r"""Return a callable for the list service level objectives method over gRPC. - - List the ``ServiceLevelObjective``\ s for the given ``Service``. - - Returns: - Callable[[~.ListServiceLevelObjectivesRequest], - ~.ListServiceLevelObjectivesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_service_level_objectives' not in self._stubs: - self._stubs['list_service_level_objectives'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/ListServiceLevelObjectives', - request_serializer=service_service.ListServiceLevelObjectivesRequest.serialize, - response_deserializer=service_service.ListServiceLevelObjectivesResponse.deserialize, - ) - return self._stubs['list_service_level_objectives'] - - @property - def update_service_level_objective(self) -> Callable[ - [service_service.UpdateServiceLevelObjectiveRequest], - service.ServiceLevelObjective]: - r"""Return a callable for the update service level objective method over gRPC. - - Update the given ``ServiceLevelObjective``. - - Returns: - Callable[[~.UpdateServiceLevelObjectiveRequest], - ~.ServiceLevelObjective]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_service_level_objective' not in self._stubs: - self._stubs['update_service_level_objective'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/UpdateServiceLevelObjective', - request_serializer=service_service.UpdateServiceLevelObjectiveRequest.serialize, - response_deserializer=service.ServiceLevelObjective.deserialize, - ) - return self._stubs['update_service_level_objective'] - - @property - def delete_service_level_objective(self) -> Callable[ - [service_service.DeleteServiceLevelObjectiveRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete service level objective method over gRPC. - - Delete the given ``ServiceLevelObjective``. - - Returns: - Callable[[~.DeleteServiceLevelObjectiveRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_service_level_objective' not in self._stubs: - self._stubs['delete_service_level_objective'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/DeleteServiceLevelObjective', - request_serializer=service_service.DeleteServiceLevelObjectiveRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_service_level_objective'] - - -__all__ = ( - 'ServiceMonitoringServiceGrpcTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py deleted file mode 100644 index b3811bbc..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,496 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.monitoring_v3.types import service -from google.cloud.monitoring_v3.types import service as gm_service -from google.cloud.monitoring_v3.types import service_service -from google.protobuf import empty_pb2 # type: ignore -from .base import ServiceMonitoringServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import ServiceMonitoringServiceGrpcTransport - - -class ServiceMonitoringServiceGrpcAsyncIOTransport(ServiceMonitoringServiceTransport): - """gRPC AsyncIO backend transport for ServiceMonitoringService. - - The Cloud Monitoring Service-Oriented Monitoring API has endpoints - for managing and querying aspects of a workspace's services. These - include the ``Service``'s monitored resources, its Service-Level - Objectives, and a taxonomy of categorized Health Metrics. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. 
- credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. 
It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_service(self) -> Callable[ - [service_service.CreateServiceRequest], - Awaitable[gm_service.Service]]: - r"""Return a callable for the create service method over gRPC. - - Create a ``Service``. - - Returns: - Callable[[~.CreateServiceRequest], - Awaitable[~.Service]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
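A minimal sketch of wiring this AsyncIO transport to an explicitly created channel (as the constructor documents, credentials are ignored once a channel is supplied); the import paths assume the package layout in this staging tree:

from google.cloud import monitoring_v3
from google.cloud.monitoring_v3.services.service_monitoring_service.transports import (
    ServiceMonitoringServiceGrpcAsyncIOTransport,
)

# create_channel() applies Application Default Credentials and the default scopes.
channel = ServiceMonitoringServiceGrpcAsyncIOTransport.create_channel(
    "monitoring.googleapis.com"
)

# Because a channel is passed, any credentials arguments would be ignored.
transport = ServiceMonitoringServiceGrpcAsyncIOTransport(channel=channel)

# The async client accepts either a transport name ("grpc_asyncio") or an instance.
client = monitoring_v3.ServiceMonitoringServiceAsyncClient(transport=transport)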
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_service' not in self._stubs: - self._stubs['create_service'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/CreateService', - request_serializer=service_service.CreateServiceRequest.serialize, - response_deserializer=gm_service.Service.deserialize, - ) - return self._stubs['create_service'] - - @property - def get_service(self) -> Callable[ - [service_service.GetServiceRequest], - Awaitable[service.Service]]: - r"""Return a callable for the get service method over gRPC. - - Get the named ``Service``. - - Returns: - Callable[[~.GetServiceRequest], - Awaitable[~.Service]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_service' not in self._stubs: - self._stubs['get_service'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/GetService', - request_serializer=service_service.GetServiceRequest.serialize, - response_deserializer=service.Service.deserialize, - ) - return self._stubs['get_service'] - - @property - def list_services(self) -> Callable[ - [service_service.ListServicesRequest], - Awaitable[service_service.ListServicesResponse]]: - r"""Return a callable for the list services method over gRPC. - - List ``Service``\ s for this workspace. - - Returns: - Callable[[~.ListServicesRequest], - Awaitable[~.ListServicesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_services' not in self._stubs: - self._stubs['list_services'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/ListServices', - request_serializer=service_service.ListServicesRequest.serialize, - response_deserializer=service_service.ListServicesResponse.deserialize, - ) - return self._stubs['list_services'] - - @property - def update_service(self) -> Callable[ - [service_service.UpdateServiceRequest], - Awaitable[gm_service.Service]]: - r"""Return a callable for the update service method over gRPC. - - Update this ``Service``. - - Returns: - Callable[[~.UpdateServiceRequest], - Awaitable[~.Service]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_service' not in self._stubs: - self._stubs['update_service'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/UpdateService', - request_serializer=service_service.UpdateServiceRequest.serialize, - response_deserializer=gm_service.Service.deserialize, - ) - return self._stubs['update_service'] - - @property - def delete_service(self) -> Callable[ - [service_service.DeleteServiceRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete service method over gRPC. - - Soft delete this ``Service``. 
- - Returns: - Callable[[~.DeleteServiceRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_service' not in self._stubs: - self._stubs['delete_service'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/DeleteService', - request_serializer=service_service.DeleteServiceRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_service'] - - @property - def create_service_level_objective(self) -> Callable[ - [service_service.CreateServiceLevelObjectiveRequest], - Awaitable[service.ServiceLevelObjective]]: - r"""Return a callable for the create service level objective method over gRPC. - - Create a ``ServiceLevelObjective`` for the given ``Service``. - - Returns: - Callable[[~.CreateServiceLevelObjectiveRequest], - Awaitable[~.ServiceLevelObjective]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_service_level_objective' not in self._stubs: - self._stubs['create_service_level_objective'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/CreateServiceLevelObjective', - request_serializer=service_service.CreateServiceLevelObjectiveRequest.serialize, - response_deserializer=service.ServiceLevelObjective.deserialize, - ) - return self._stubs['create_service_level_objective'] - - @property - def get_service_level_objective(self) -> Callable[ - [service_service.GetServiceLevelObjectiveRequest], - Awaitable[service.ServiceLevelObjective]]: - r"""Return a callable for the get service level objective method over gRPC. - - Get a ``ServiceLevelObjective`` by name. - - Returns: - Callable[[~.GetServiceLevelObjectiveRequest], - Awaitable[~.ServiceLevelObjective]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_service_level_objective' not in self._stubs: - self._stubs['get_service_level_objective'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/GetServiceLevelObjective', - request_serializer=service_service.GetServiceLevelObjectiveRequest.serialize, - response_deserializer=service.ServiceLevelObjective.deserialize, - ) - return self._stubs['get_service_level_objective'] - - @property - def list_service_level_objectives(self) -> Callable[ - [service_service.ListServiceLevelObjectivesRequest], - Awaitable[service_service.ListServiceLevelObjectivesResponse]]: - r"""Return a callable for the list service level objectives method over gRPC. - - List the ``ServiceLevelObjective``\ s for the given ``Service``. - - Returns: - Callable[[~.ListServiceLevelObjectivesRequest], - Awaitable[~.ListServiceLevelObjectivesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_service_level_objectives' not in self._stubs: - self._stubs['list_service_level_objectives'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/ListServiceLevelObjectives', - request_serializer=service_service.ListServiceLevelObjectivesRequest.serialize, - response_deserializer=service_service.ListServiceLevelObjectivesResponse.deserialize, - ) - return self._stubs['list_service_level_objectives'] - - @property - def update_service_level_objective(self) -> Callable[ - [service_service.UpdateServiceLevelObjectiveRequest], - Awaitable[service.ServiceLevelObjective]]: - r"""Return a callable for the update service level objective method over gRPC. - - Update the given ``ServiceLevelObjective``. - - Returns: - Callable[[~.UpdateServiceLevelObjectiveRequest], - Awaitable[~.ServiceLevelObjective]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_service_level_objective' not in self._stubs: - self._stubs['update_service_level_objective'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/UpdateServiceLevelObjective', - request_serializer=service_service.UpdateServiceLevelObjectiveRequest.serialize, - response_deserializer=service.ServiceLevelObjective.deserialize, - ) - return self._stubs['update_service_level_objective'] - - @property - def delete_service_level_objective(self) -> Callable[ - [service_service.DeleteServiceLevelObjectiveRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete service level objective method over gRPC. - - Delete the given ``ServiceLevelObjective``. - - Returns: - Callable[[~.DeleteServiceLevelObjectiveRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_service_level_objective' not in self._stubs: - self._stubs['delete_service_level_objective'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.ServiceMonitoringService/DeleteServiceLevelObjective', - request_serializer=service_service.DeleteServiceLevelObjectiveRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_service_level_objective'] - - -__all__ = ( - 'ServiceMonitoringServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/__init__.py deleted file mode 100644 index fd31924e..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import UptimeCheckServiceClient -from .async_client import UptimeCheckServiceAsyncClient - -__all__ = ( - 'UptimeCheckServiceClient', - 'UptimeCheckServiceAsyncClient', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py deleted file mode 100644 index 37f19380..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py +++ /dev/null @@ -1,686 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.services.uptime_check_service import pagers -from google.cloud.monitoring_v3.types import uptime -from google.cloud.monitoring_v3.types import uptime_service -from google.protobuf import duration_pb2 # type: ignore -from .transports.base import UptimeCheckServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import UptimeCheckServiceGrpcAsyncIOTransport -from .client import UptimeCheckServiceClient - - -class UptimeCheckServiceAsyncClient: - """The UptimeCheckService API is used to manage (list, create, delete, - edit) Uptime check configurations in the Stackdriver Monitoring - product. An Uptime check is a piece of configuration that determines - which resources and services to monitor for availability. These - configurations can also be configured interactively by navigating to - the [Cloud Console] (http://console.cloud.google.com), selecting the - appropriate project, clicking on "Monitoring" on the left-hand side - to navigate to Stackdriver, and then clicking on "Uptime". 
- """ - - _client: UptimeCheckServiceClient - - DEFAULT_ENDPOINT = UptimeCheckServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = UptimeCheckServiceClient.DEFAULT_MTLS_ENDPOINT - - uptime_check_config_path = staticmethod(UptimeCheckServiceClient.uptime_check_config_path) - parse_uptime_check_config_path = staticmethod(UptimeCheckServiceClient.parse_uptime_check_config_path) - common_billing_account_path = staticmethod(UptimeCheckServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(UptimeCheckServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(UptimeCheckServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(UptimeCheckServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(UptimeCheckServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(UptimeCheckServiceClient.parse_common_organization_path) - common_project_path = staticmethod(UptimeCheckServiceClient.common_project_path) - parse_common_project_path = staticmethod(UptimeCheckServiceClient.parse_common_project_path) - common_location_path = staticmethod(UptimeCheckServiceClient.common_location_path) - parse_common_location_path = staticmethod(UptimeCheckServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - UptimeCheckServiceAsyncClient: The constructed client. - """ - return UptimeCheckServiceClient.from_service_account_info.__func__(UptimeCheckServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - UptimeCheckServiceAsyncClient: The constructed client. - """ - return UptimeCheckServiceClient.from_service_account_file.__func__(UptimeCheckServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> UptimeCheckServiceTransport: - """Returns the transport used by the client instance. - - Returns: - UptimeCheckServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(UptimeCheckServiceClient).get_transport_class, type(UptimeCheckServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, UptimeCheckServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the uptime check service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- transport (Union[str, ~.UptimeCheckServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = UptimeCheckServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_uptime_check_configs(self, - request: uptime_service.ListUptimeCheckConfigsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListUptimeCheckConfigsAsyncPager: - r"""Lists the existing valid Uptime check configurations - for the project (leaving out any invalid - configurations). - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest`): - The request object. The protocol for the - `ListUptimeCheckConfigs` request. - parent (:class:`str`): - Required. The - `project `__ - whose Uptime check configurations are listed. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.uptime_check_service.pagers.ListUptimeCheckConfigsAsyncPager: - The protocol for the ListUptimeCheckConfigs response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = uptime_service.ListUptimeCheckConfigsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
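A short sketch of calling this coroutine and draining the async pager it returns (the project ID is illustrative):

import asyncio

from google.cloud import monitoring_v3


async def list_configs(project_id: str) -> None:
    client = monitoring_v3.UptimeCheckServiceAsyncClient()
    # Awaiting the call yields a ListUptimeCheckConfigsAsyncPager.
    pager = await client.list_uptime_check_configs(parent=f"projects/{project_id}")
    async for config in pager:  # additional pages are resolved automatically
        print(config.name, config.display_name)


asyncio.run(list_configs("my-project"))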
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_uptime_check_configs, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListUptimeCheckConfigsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_uptime_check_config(self, - request: uptime_service.GetUptimeCheckConfigRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> uptime.UptimeCheckConfig: - r"""Gets a single Uptime check configuration. - - Args: - request (:class:`google.cloud.monitoring_v3.types.GetUptimeCheckConfigRequest`): - The request object. The protocol for the - `GetUptimeCheckConfig` request. - name (:class:`str`): - Required. The Uptime check configuration to retrieve. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.UptimeCheckConfig: - This message configures which - resources and services to monitor for - availability. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = uptime_service.GetUptimeCheckConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_uptime_check_config, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
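The defaults wired above can be overridden per call through the ``retry`` and ``timeout`` arguments; a hedged sketch mirroring the same retry shape (shown with the sync client for brevity, but the async client takes identical arguments):

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

from google.cloud import monitoring_v3

# Same predicate as the generated default, with a longer overall deadline.
custom_retry = retries.Retry(
    initial=0.1,
    maximum=30.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    deadline=60.0,
)

client = monitoring_v3.UptimeCheckServiceClient()
config = client.get_uptime_check_config(
    name="projects/my-project/uptimeCheckConfigs/my-check",  # illustrative name
    retry=custom_retry,
    timeout=60.0,
)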
- return response - - async def create_uptime_check_config(self, - request: uptime_service.CreateUptimeCheckConfigRequest = None, - *, - parent: str = None, - uptime_check_config: uptime.UptimeCheckConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> uptime.UptimeCheckConfig: - r"""Creates a new Uptime check configuration. - - Args: - request (:class:`google.cloud.monitoring_v3.types.CreateUptimeCheckConfigRequest`): - The request object. The protocol for the - `CreateUptimeCheckConfig` request. - parent (:class:`str`): - Required. The - `project `__ - in which to create the Uptime check. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - uptime_check_config (:class:`google.cloud.monitoring_v3.types.UptimeCheckConfig`): - Required. The new Uptime check - configuration. - - This corresponds to the ``uptime_check_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.UptimeCheckConfig: - This message configures which - resources and services to monitor for - availability. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, uptime_check_config]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = uptime_service.CreateUptimeCheckConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if uptime_check_config is not None: - request.uptime_check_config = uptime_check_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_uptime_check_config, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_uptime_check_config(self, - request: uptime_service.UpdateUptimeCheckConfigRequest = None, - *, - uptime_check_config: uptime.UptimeCheckConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> uptime.UptimeCheckConfig: - r"""Updates an Uptime check configuration. You can either replace - the entire configuration with a new one or replace only certain - fields in the current configuration by specifying the fields to - be updated via ``updateMask``. Returns the updated - configuration. 
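A sketch tying the create and update calls together: create a check, then send only ``display_name`` via ``update_mask`` as the docstring above describes (values are illustrative; the sync client is used for brevity, and the async client takes the same arguments):

from google.api import monitored_resource_pb2
from google.protobuf import duration_pb2, field_mask_pb2

from google.cloud import monitoring_v3

client = monitoring_v3.UptimeCheckServiceClient()
project_id = "my-project"  # illustrative

config = monitoring_v3.UptimeCheckConfig(
    display_name="Example HTTPS uptime check",
    monitored_resource=monitored_resource_pb2.MonitoredResource(
        type="uptime_url",
        labels={"project_id": project_id, "host": "www.example.com"},
    ),
    http_check=monitoring_v3.UptimeCheckConfig.HttpCheck(
        path="/", port=443, use_ssl=True
    ),
    timeout=duration_pb2.Duration(seconds=10),
    period=duration_pb2.Duration(seconds=300),
)
created = client.create_uptime_check_config(
    parent=f"projects/{project_id}", uptime_check_config=config
)

# Update only the display name; other fields are left untouched on the server.
created.display_name = "Renamed uptime check"
updated = client.update_uptime_check_config(
    request=monitoring_v3.UpdateUptimeCheckConfigRequest(
        uptime_check_config=created,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
)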
- - Args: - request (:class:`google.cloud.monitoring_v3.types.UpdateUptimeCheckConfigRequest`): - The request object. The protocol for the - `UpdateUptimeCheckConfig` request. - uptime_check_config (:class:`google.cloud.monitoring_v3.types.UptimeCheckConfig`): - Required. If an ``updateMask`` has been specified, this - field gives the values for the set of fields mentioned - in the ``updateMask``. If an ``updateMask`` has not been - given, this Uptime check configuration replaces the - current configuration. If a field is mentioned in - ``updateMask`` but the corresonding field is omitted in - this partial Uptime check configuration, it has the - effect of deleting/clearing the field from the - configuration on the server. - - The following fields can be updated: ``display_name``, - ``http_check``, ``tcp_check``, ``timeout``, - ``content_matchers``, and ``selected_regions``. - - This corresponds to the ``uptime_check_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.UptimeCheckConfig: - This message configures which - resources and services to monitor for - availability. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([uptime_check_config]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = uptime_service.UpdateUptimeCheckConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if uptime_check_config is not None: - request.uptime_check_config = uptime_check_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_uptime_check_config, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("uptime_check_config.name", request.uptime_check_config.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_uptime_check_config(self, - request: uptime_service.DeleteUptimeCheckConfigRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an Uptime check configuration. Note that this - method will fail if the Uptime check configuration is - referenced by an alert policy or other dependent configs - that would be rendered invalid by the deletion. - - Args: - request (:class:`google.cloud.monitoring_v3.types.DeleteUptimeCheckConfigRequest`): - The request object. The protocol for the - `DeleteUptimeCheckConfig` request. - name (:class:`str`): - Required. The Uptime check configuration to delete. 
The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = uptime_service.DeleteUptimeCheckConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_uptime_check_config, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_uptime_check_ips(self, - request: uptime_service.ListUptimeCheckIpsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListUptimeCheckIpsAsyncPager: - r"""Returns the list of IP addresses that checkers run - from - - Args: - request (:class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest`): - The request object. The protocol for the - `ListUptimeCheckIps` request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.uptime_check_service.pagers.ListUptimeCheckIpsAsyncPager: - The protocol for the ListUptimeCheckIps response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - request = uptime_service.ListUptimeCheckIpsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_uptime_check_ips, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. 
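A small sketch of the call above; no arguments are required, and the async pager yields one UptimeCheckIp per checker address:

import asyncio

from google.cloud import monitoring_v3


async def show_checker_ips() -> None:
    client = monitoring_v3.UptimeCheckServiceAsyncClient()
    pager = await client.list_uptime_check_ips()
    async for ip in pager:
        # region is an enum; location and ip_address are plain strings.
        print(ip.region, ip.location, ip.ip_address)


asyncio.run(show_checker_ips())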
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListUptimeCheckIpsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "UptimeCheckServiceAsyncClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/client.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/client.py deleted file mode 100644 index 6c221982..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/client.py +++ /dev/null @@ -1,854 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.services.uptime_check_service import pagers -from google.cloud.monitoring_v3.types import uptime -from google.cloud.monitoring_v3.types import uptime_service -from google.protobuf import duration_pb2 # type: ignore -from .transports.base import UptimeCheckServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import UptimeCheckServiceGrpcTransport -from .transports.grpc_asyncio import UptimeCheckServiceGrpcAsyncIOTransport - - -class UptimeCheckServiceClientMeta(type): - """Metaclass for the UptimeCheckService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[UptimeCheckServiceTransport]] - _transport_registry["grpc"] = UptimeCheckServiceGrpcTransport - _transport_registry["grpc_asyncio"] = UptimeCheckServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[UptimeCheckServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class UptimeCheckServiceClient(metaclass=UptimeCheckServiceClientMeta): - """The UptimeCheckService API is used to manage (list, create, delete, - edit) Uptime check configurations in the Stackdriver Monitoring - product. An Uptime check is a piece of configuration that determines - which resources and services to monitor for availability. These - configurations can also be configured interactively by navigating to - the [Cloud Console] (http://console.cloud.google.com), selecting the - appropriate project, clicking on "Monitoring" on the left-hand side - to navigate to Stackdriver, and then clicking on "Uptime". - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "monitoring.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - UptimeCheckServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - UptimeCheckServiceClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> UptimeCheckServiceTransport: - """Returns the transport used by the client instance. - - Returns: - UptimeCheckServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def uptime_check_config_path(project: str,uptime_check_config: str,) -> str: - """Returns a fully-qualified uptime_check_config string.""" - return "projects/{project}/uptimeCheckConfigs/{uptime_check_config}".format(project=project, uptime_check_config=uptime_check_config, ) - - @staticmethod - def parse_uptime_check_config_path(path: str) -> Dict[str,str]: - """Parses a uptime_check_config path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/uptimeCheckConfigs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, UptimeCheckServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the uptime check service client. 
- - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, UptimeCheckServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, UptimeCheckServiceTransport): - # transport is a UptimeCheckServiceTransport instance. 
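A hedged sketch of the endpoint selection logic above: the environment variable steers the default choice, while an explicit ``api_endpoint`` in ``client_options`` (a dict or a ClientOptions instance both work) always takes precedence:

import os

from google.cloud import monitoring_v3

# Read at construction time, so set it before creating the client.
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"

client = monitoring_v3.UptimeCheckServiceClient(
    client_options={"api_endpoint": "monitoring.googleapis.com"},
)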
- if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def list_uptime_check_configs(self, - request: uptime_service.ListUptimeCheckConfigsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListUptimeCheckConfigsPager: - r"""Lists the existing valid Uptime check configurations - for the project (leaving out any invalid - configurations). - - Args: - request (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest): - The request object. The protocol for the - `ListUptimeCheckConfigs` request. - parent (str): - Required. The - `project `__ - whose Uptime check configurations are listed. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.uptime_check_service.pagers.ListUptimeCheckConfigsPager: - The protocol for the ListUptimeCheckConfigs response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a uptime_service.ListUptimeCheckConfigsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, uptime_service.ListUptimeCheckConfigsRequest): - request = uptime_service.ListUptimeCheckConfigsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_uptime_check_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListUptimeCheckConfigsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_uptime_check_config(self, - request: uptime_service.GetUptimeCheckConfigRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> uptime.UptimeCheckConfig: - r"""Gets a single Uptime check configuration. - - Args: - request (google.cloud.monitoring_v3.types.GetUptimeCheckConfigRequest): - The request object. The protocol for the - `GetUptimeCheckConfig` request. - name (str): - Required. The Uptime check configuration to retrieve. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.UptimeCheckConfig: - This message configures which - resources and services to monitor for - availability. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a uptime_service.GetUptimeCheckConfigRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, uptime_service.GetUptimeCheckConfigRequest): - request = uptime_service.GetUptimeCheckConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_uptime_check_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_uptime_check_config(self, - request: uptime_service.CreateUptimeCheckConfigRequest = None, - *, - parent: str = None, - uptime_check_config: uptime.UptimeCheckConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> uptime.UptimeCheckConfig: - r"""Creates a new Uptime check configuration. - - Args: - request (google.cloud.monitoring_v3.types.CreateUptimeCheckConfigRequest): - The request object. 
The protocol for the - `CreateUptimeCheckConfig` request. - parent (str): - Required. The - `project `__ - in which to create the Uptime check. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - uptime_check_config (google.cloud.monitoring_v3.types.UptimeCheckConfig): - Required. The new Uptime check - configuration. - - This corresponds to the ``uptime_check_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.UptimeCheckConfig: - This message configures which - resources and services to monitor for - availability. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, uptime_check_config]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a uptime_service.CreateUptimeCheckConfigRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, uptime_service.CreateUptimeCheckConfigRequest): - request = uptime_service.CreateUptimeCheckConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if uptime_check_config is not None: - request.uptime_check_config = uptime_check_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_uptime_check_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_uptime_check_config(self, - request: uptime_service.UpdateUptimeCheckConfigRequest = None, - *, - uptime_check_config: uptime.UptimeCheckConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> uptime.UptimeCheckConfig: - r"""Updates an Uptime check configuration. You can either replace - the entire configuration with a new one or replace only certain - fields in the current configuration by specifying the fields to - be updated via ``updateMask``. Returns the updated - configuration. - - Args: - request (google.cloud.monitoring_v3.types.UpdateUptimeCheckConfigRequest): - The request object. The protocol for the - `UpdateUptimeCheckConfig` request. - uptime_check_config (google.cloud.monitoring_v3.types.UptimeCheckConfig): - Required. If an ``updateMask`` has been specified, this - field gives the values for the set of fields mentioned - in the ``updateMask``. 
If an ``updateMask`` has not been - given, this Uptime check configuration replaces the - current configuration. If a field is mentioned in - ``updateMask`` but the corresonding field is omitted in - this partial Uptime check configuration, it has the - effect of deleting/clearing the field from the - configuration on the server. - - The following fields can be updated: ``display_name``, - ``http_check``, ``tcp_check``, ``timeout``, - ``content_matchers``, and ``selected_regions``. - - This corresponds to the ``uptime_check_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.types.UptimeCheckConfig: - This message configures which - resources and services to monitor for - availability. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([uptime_check_config]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a uptime_service.UpdateUptimeCheckConfigRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, uptime_service.UpdateUptimeCheckConfigRequest): - request = uptime_service.UpdateUptimeCheckConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if uptime_check_config is not None: - request.uptime_check_config = uptime_check_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_uptime_check_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("uptime_check_config.name", request.uptime_check_config.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_uptime_check_config(self, - request: uptime_service.DeleteUptimeCheckConfigRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an Uptime check configuration. Note that this - method will fail if the Uptime check configuration is - referenced by an alert policy or other dependent configs - that would be rendered invalid by the deletion. - - Args: - request (google.cloud.monitoring_v3.types.DeleteUptimeCheckConfigRequest): - The request object. The protocol for the - `DeleteUptimeCheckConfig` request. - name (str): - Required. The Uptime check configuration to delete. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a uptime_service.DeleteUptimeCheckConfigRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, uptime_service.DeleteUptimeCheckConfigRequest): - request = uptime_service.DeleteUptimeCheckConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_uptime_check_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_uptime_check_ips(self, - request: uptime_service.ListUptimeCheckIpsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListUptimeCheckIpsPager: - r"""Returns the list of IP addresses that checkers run - from - - Args: - request (google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest): - The request object. The protocol for the - `ListUptimeCheckIps` request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.monitoring_v3.services.uptime_check_service.pagers.ListUptimeCheckIpsPager: - The protocol for the ListUptimeCheckIps response. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a uptime_service.ListUptimeCheckIpsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, uptime_service.ListUptimeCheckIpsRequest): - request = uptime_service.ListUptimeCheckIpsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_uptime_check_ips] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListUptimeCheckIpsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "UptimeCheckServiceClient", -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/pagers.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/pagers.py deleted file mode 100644 index 59b3cba2..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/pagers.py +++ /dev/null @@ -1,263 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.monitoring_v3.types import uptime -from google.cloud.monitoring_v3.types import uptime_service - - -class ListUptimeCheckConfigsPager: - """A pager for iterating through ``list_uptime_check_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``uptime_check_configs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListUptimeCheckConfigs`` requests and continue to iterate - through the ``uptime_check_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., uptime_service.ListUptimeCheckConfigsResponse], - request: uptime_service.ListUptimeCheckConfigsRequest, - response: uptime_service.ListUptimeCheckConfigsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = uptime_service.ListUptimeCheckConfigsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[uptime_service.ListUptimeCheckConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[uptime.UptimeCheckConfig]: - for page in self.pages: - yield from page.uptime_check_configs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListUptimeCheckConfigsAsyncPager: - """A pager for iterating through ``list_uptime_check_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``uptime_check_configs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListUptimeCheckConfigs`` requests and continue to iterate - through the ``uptime_check_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[uptime_service.ListUptimeCheckConfigsResponse]], - request: uptime_service.ListUptimeCheckConfigsRequest, - response: uptime_service.ListUptimeCheckConfigsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListUptimeCheckConfigsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = uptime_service.ListUptimeCheckConfigsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[uptime_service.ListUptimeCheckConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[uptime.UptimeCheckConfig]: - async def async_generator(): - async for page in self.pages: - for response in page.uptime_check_configs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListUptimeCheckIpsPager: - """A pager for iterating through ``list_uptime_check_ips`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``uptime_check_ips`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListUptimeCheckIps`` requests and continue to iterate - through the ``uptime_check_ips`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., uptime_service.ListUptimeCheckIpsResponse], - request: uptime_service.ListUptimeCheckIpsRequest, - response: uptime_service.ListUptimeCheckIpsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = uptime_service.ListUptimeCheckIpsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[uptime_service.ListUptimeCheckIpsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[uptime.UptimeCheckIp]: - for page in self.pages: - yield from page.uptime_check_ips - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListUptimeCheckIpsAsyncPager: - """A pager for iterating through ``list_uptime_check_ips`` requests. - - This class thinly wraps an initial - :class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``uptime_check_ips`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListUptimeCheckIps`` requests and continue to iterate - through the ``uptime_check_ips`` field on the - corresponding responses. - - All the usual :class:`google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[uptime_service.ListUptimeCheckIpsResponse]], - request: uptime_service.ListUptimeCheckIpsRequest, - response: uptime_service.ListUptimeCheckIpsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest): - The initial request object. - response (google.cloud.monitoring_v3.types.ListUptimeCheckIpsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = uptime_service.ListUptimeCheckIpsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[uptime_service.ListUptimeCheckIpsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[uptime.UptimeCheckIp]: - async def async_generator(): - async for page in self.pages: - for response in page.uptime_check_ips: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/__init__.py deleted file mode 100644 index 7f3aae1e..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import UptimeCheckServiceTransport -from .grpc import UptimeCheckServiceGrpcTransport -from .grpc_asyncio import UptimeCheckServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[UptimeCheckServiceTransport]] -_transport_registry['grpc'] = UptimeCheckServiceGrpcTransport -_transport_registry['grpc_asyncio'] = UptimeCheckServiceGrpcAsyncIOTransport - -__all__ = ( - 'UptimeCheckServiceTransport', - 'UptimeCheckServiceGrpcTransport', - 'UptimeCheckServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py deleted file mode 100644 index 1d597e1b..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py +++ /dev/null @@ -1,266 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.monitoring_v3.types import uptime -from google.cloud.monitoring_v3.types import uptime_service -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-monitoring', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class UptimeCheckServiceTransport(abc.ABC): - """Abstract transport class for UptimeCheckService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ) - - DEFAULT_HOST: str = 'monitoring.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_uptime_check_configs: gapic_v1.method.wrap_method( - self.list_uptime_check_configs, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.get_uptime_check_config: gapic_v1.method.wrap_method( - self.get_uptime_check_config, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.create_uptime_check_config: gapic_v1.method.wrap_method( - self.create_uptime_check_config, - default_timeout=30.0, - client_info=client_info, - ), - self.update_uptime_check_config: gapic_v1.method.wrap_method( - self.update_uptime_check_config, - default_timeout=30.0, - client_info=client_info, - ), - self.delete_uptime_check_config: gapic_v1.method.wrap_method( - self.delete_uptime_check_config, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_uptime_check_ips: gapic_v1.method.wrap_method( - self.list_uptime_check_ips, - default_retry=retries.Retry( -initial=0.1,maximum=30.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - } - - @property - def list_uptime_check_configs(self) -> Callable[ - [uptime_service.ListUptimeCheckConfigsRequest], - Union[ - uptime_service.ListUptimeCheckConfigsResponse, 
- Awaitable[uptime_service.ListUptimeCheckConfigsResponse] - ]]: - raise NotImplementedError() - - @property - def get_uptime_check_config(self) -> Callable[ - [uptime_service.GetUptimeCheckConfigRequest], - Union[ - uptime.UptimeCheckConfig, - Awaitable[uptime.UptimeCheckConfig] - ]]: - raise NotImplementedError() - - @property - def create_uptime_check_config(self) -> Callable[ - [uptime_service.CreateUptimeCheckConfigRequest], - Union[ - uptime.UptimeCheckConfig, - Awaitable[uptime.UptimeCheckConfig] - ]]: - raise NotImplementedError() - - @property - def update_uptime_check_config(self) -> Callable[ - [uptime_service.UpdateUptimeCheckConfigRequest], - Union[ - uptime.UptimeCheckConfig, - Awaitable[uptime.UptimeCheckConfig] - ]]: - raise NotImplementedError() - - @property - def delete_uptime_check_config(self) -> Callable[ - [uptime_service.DeleteUptimeCheckConfigRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_uptime_check_ips(self) -> Callable[ - [uptime_service.ListUptimeCheckIpsRequest], - Union[ - uptime_service.ListUptimeCheckIpsResponse, - Awaitable[uptime_service.ListUptimeCheckIpsResponse] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'UptimeCheckServiceTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py deleted file mode 100644 index f7704bae..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py +++ /dev/null @@ -1,401 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.monitoring_v3.types import uptime -from google.cloud.monitoring_v3.types import uptime_service -from google.protobuf import empty_pb2 # type: ignore -from .base import UptimeCheckServiceTransport, DEFAULT_CLIENT_INFO - - -class UptimeCheckServiceGrpcTransport(UptimeCheckServiceTransport): - """gRPC backend transport for UptimeCheckService. - - The UptimeCheckService API is used to manage (list, create, delete, - edit) Uptime check configurations in the Stackdriver Monitoring - product. An Uptime check is a piece of configuration that determines - which resources and services to monitor for availability. 
These - configurations can also be configured interactively by navigating to - the [Cloud Console] (http://console.cloud.google.com), selecting the - appropriate project, clicking on "Monitoring" on the left-hand side - to navigate to Stackdriver, and then clicking on "Uptime". - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def list_uptime_check_configs(self) -> Callable[ - [uptime_service.ListUptimeCheckConfigsRequest], - uptime_service.ListUptimeCheckConfigsResponse]: - r"""Return a callable for the list uptime check configs method over gRPC. - - Lists the existing valid Uptime check configurations - for the project (leaving out any invalid - configurations). - - Returns: - Callable[[~.ListUptimeCheckConfigsRequest], - ~.ListUptimeCheckConfigsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_uptime_check_configs' not in self._stubs: - self._stubs['list_uptime_check_configs'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/ListUptimeCheckConfigs', - request_serializer=uptime_service.ListUptimeCheckConfigsRequest.serialize, - response_deserializer=uptime_service.ListUptimeCheckConfigsResponse.deserialize, - ) - return self._stubs['list_uptime_check_configs'] - - @property - def get_uptime_check_config(self) -> Callable[ - [uptime_service.GetUptimeCheckConfigRequest], - uptime.UptimeCheckConfig]: - r"""Return a callable for the get uptime check config method over gRPC. - - Gets a single Uptime check configuration. - - Returns: - Callable[[~.GetUptimeCheckConfigRequest], - ~.UptimeCheckConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_uptime_check_config' not in self._stubs: - self._stubs['get_uptime_check_config'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/GetUptimeCheckConfig', - request_serializer=uptime_service.GetUptimeCheckConfigRequest.serialize, - response_deserializer=uptime.UptimeCheckConfig.deserialize, - ) - return self._stubs['get_uptime_check_config'] - - @property - def create_uptime_check_config(self) -> Callable[ - [uptime_service.CreateUptimeCheckConfigRequest], - uptime.UptimeCheckConfig]: - r"""Return a callable for the create uptime check config method over gRPC. - - Creates a new Uptime check configuration. - - Returns: - Callable[[~.CreateUptimeCheckConfigRequest], - ~.UptimeCheckConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_uptime_check_config' not in self._stubs: - self._stubs['create_uptime_check_config'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/CreateUptimeCheckConfig', - request_serializer=uptime_service.CreateUptimeCheckConfigRequest.serialize, - response_deserializer=uptime.UptimeCheckConfig.deserialize, - ) - return self._stubs['create_uptime_check_config'] - - @property - def update_uptime_check_config(self) -> Callable[ - [uptime_service.UpdateUptimeCheckConfigRequest], - uptime.UptimeCheckConfig]: - r"""Return a callable for the update uptime check config method over gRPC. - - Updates an Uptime check configuration. You can either replace - the entire configuration with a new one or replace only certain - fields in the current configuration by specifying the fields to - be updated via ``updateMask``. Returns the updated - configuration. - - Returns: - Callable[[~.UpdateUptimeCheckConfigRequest], - ~.UptimeCheckConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_uptime_check_config' not in self._stubs: - self._stubs['update_uptime_check_config'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/UpdateUptimeCheckConfig', - request_serializer=uptime_service.UpdateUptimeCheckConfigRequest.serialize, - response_deserializer=uptime.UptimeCheckConfig.deserialize, - ) - return self._stubs['update_uptime_check_config'] - - @property - def delete_uptime_check_config(self) -> Callable[ - [uptime_service.DeleteUptimeCheckConfigRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete uptime check config method over gRPC. - - Deletes an Uptime check configuration. Note that this - method will fail if the Uptime check configuration is - referenced by an alert policy or other dependent configs - that would be rendered invalid by the deletion. - - Returns: - Callable[[~.DeleteUptimeCheckConfigRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_uptime_check_config' not in self._stubs: - self._stubs['delete_uptime_check_config'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/DeleteUptimeCheckConfig', - request_serializer=uptime_service.DeleteUptimeCheckConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_uptime_check_config'] - - @property - def list_uptime_check_ips(self) -> Callable[ - [uptime_service.ListUptimeCheckIpsRequest], - uptime_service.ListUptimeCheckIpsResponse]: - r"""Return a callable for the list uptime check ips method over gRPC. - - Returns the list of IP addresses that checkers run - from - - Returns: - Callable[[~.ListUptimeCheckIpsRequest], - ~.ListUptimeCheckIpsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_uptime_check_ips' not in self._stubs: - self._stubs['list_uptime_check_ips'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/ListUptimeCheckIps', - request_serializer=uptime_service.ListUptimeCheckIpsRequest.serialize, - response_deserializer=uptime_service.ListUptimeCheckIpsResponse.deserialize, - ) - return self._stubs['list_uptime_check_ips'] - - -__all__ = ( - 'UptimeCheckServiceGrpcTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py deleted file mode 100644 index e4b06f19..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,405 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.monitoring_v3.types import uptime -from google.cloud.monitoring_v3.types import uptime_service -from google.protobuf import empty_pb2 # type: ignore -from .base import UptimeCheckServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import UptimeCheckServiceGrpcTransport - - -class UptimeCheckServiceGrpcAsyncIOTransport(UptimeCheckServiceTransport): - """gRPC AsyncIO backend transport for UptimeCheckService. - - The UptimeCheckService API is used to manage (list, create, delete, - edit) Uptime check configurations in the Stackdriver Monitoring - product. An Uptime check is a piece of configuration that determines - which resources and services to monitor for availability. These - configurations can also be configured interactively by navigating to - the [Cloud Console] (http://console.cloud.google.com), selecting the - appropriate project, clicking on "Monitoring" on the left-hand side - to navigate to Stackdriver, and then clicking on "Uptime". - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'monitoring.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. 
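The constructor and ``grpc_channel`` property above mean the channel is created once, either taken from the caller or built by ``create_channel``, and then cached on the instance. A hedged sketch of wiring the transport explicitly and handing it to the client (import paths follow the generated layout; credentials fall back to Application Default Credentials)::

    from google.cloud.monitoring_v3 import UptimeCheckServiceAsyncClient
    from google.cloud.monitoring_v3.services.uptime_check_service.transports import (
        UptimeCheckServiceGrpcAsyncIOTransport,
    )

    transport = UptimeCheckServiceGrpcAsyncIOTransport(host="monitoring.googleapis.com")
    client = UptimeCheckServiceAsyncClient(transport=transport)

    # Repeated reads return the same cached aio.Channel instance.
    assert transport.grpc_channel is transport.grpc_channel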
- return self._grpc_channel - - @property - def list_uptime_check_configs(self) -> Callable[ - [uptime_service.ListUptimeCheckConfigsRequest], - Awaitable[uptime_service.ListUptimeCheckConfigsResponse]]: - r"""Return a callable for the list uptime check configs method over gRPC. - - Lists the existing valid Uptime check configurations - for the project (leaving out any invalid - configurations). - - Returns: - Callable[[~.ListUptimeCheckConfigsRequest], - Awaitable[~.ListUptimeCheckConfigsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_uptime_check_configs' not in self._stubs: - self._stubs['list_uptime_check_configs'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/ListUptimeCheckConfigs', - request_serializer=uptime_service.ListUptimeCheckConfigsRequest.serialize, - response_deserializer=uptime_service.ListUptimeCheckConfigsResponse.deserialize, - ) - return self._stubs['list_uptime_check_configs'] - - @property - def get_uptime_check_config(self) -> Callable[ - [uptime_service.GetUptimeCheckConfigRequest], - Awaitable[uptime.UptimeCheckConfig]]: - r"""Return a callable for the get uptime check config method over gRPC. - - Gets a single Uptime check configuration. - - Returns: - Callable[[~.GetUptimeCheckConfigRequest], - Awaitable[~.UptimeCheckConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_uptime_check_config' not in self._stubs: - self._stubs['get_uptime_check_config'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/GetUptimeCheckConfig', - request_serializer=uptime_service.GetUptimeCheckConfigRequest.serialize, - response_deserializer=uptime.UptimeCheckConfig.deserialize, - ) - return self._stubs['get_uptime_check_config'] - - @property - def create_uptime_check_config(self) -> Callable[ - [uptime_service.CreateUptimeCheckConfigRequest], - Awaitable[uptime.UptimeCheckConfig]]: - r"""Return a callable for the create uptime check config method over gRPC. - - Creates a new Uptime check configuration. - - Returns: - Callable[[~.CreateUptimeCheckConfigRequest], - Awaitable[~.UptimeCheckConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_uptime_check_config' not in self._stubs: - self._stubs['create_uptime_check_config'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/CreateUptimeCheckConfig', - request_serializer=uptime_service.CreateUptimeCheckConfigRequest.serialize, - response_deserializer=uptime.UptimeCheckConfig.deserialize, - ) - return self._stubs['create_uptime_check_config'] - - @property - def update_uptime_check_config(self) -> Callable[ - [uptime_service.UpdateUptimeCheckConfigRequest], - Awaitable[uptime.UptimeCheckConfig]]: - r"""Return a callable for the update uptime check config method over gRPC. - - Updates an Uptime check configuration. 
You can either replace - the entire configuration with a new one or replace only certain - fields in the current configuration by specifying the fields to - be updated via ``updateMask``. Returns the updated - configuration. - - Returns: - Callable[[~.UpdateUptimeCheckConfigRequest], - Awaitable[~.UptimeCheckConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_uptime_check_config' not in self._stubs: - self._stubs['update_uptime_check_config'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/UpdateUptimeCheckConfig', - request_serializer=uptime_service.UpdateUptimeCheckConfigRequest.serialize, - response_deserializer=uptime.UptimeCheckConfig.deserialize, - ) - return self._stubs['update_uptime_check_config'] - - @property - def delete_uptime_check_config(self) -> Callable[ - [uptime_service.DeleteUptimeCheckConfigRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete uptime check config method over gRPC. - - Deletes an Uptime check configuration. Note that this - method will fail if the Uptime check configuration is - referenced by an alert policy or other dependent configs - that would be rendered invalid by the deletion. - - Returns: - Callable[[~.DeleteUptimeCheckConfigRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_uptime_check_config' not in self._stubs: - self._stubs['delete_uptime_check_config'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/DeleteUptimeCheckConfig', - request_serializer=uptime_service.DeleteUptimeCheckConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_uptime_check_config'] - - @property - def list_uptime_check_ips(self) -> Callable[ - [uptime_service.ListUptimeCheckIpsRequest], - Awaitable[uptime_service.ListUptimeCheckIpsResponse]]: - r"""Return a callable for the list uptime check ips method over gRPC. - - Returns the list of IP addresses that checkers run - from - - Returns: - Callable[[~.ListUptimeCheckIpsRequest], - Awaitable[~.ListUptimeCheckIpsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
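A hedged example of the ``updateMask`` behaviour documented above: replace only ``display_name`` on an existing configuration while leaving every other field untouched. The synchronous client is used for brevity, and the resource names are placeholders::

    from google.cloud import monitoring_v3
    from google.protobuf import field_mask_pb2

    client = monitoring_v3.UptimeCheckServiceClient()
    request = monitoring_v3.UpdateUptimeCheckConfigRequest(
        uptime_check_config=monitoring_v3.UptimeCheckConfig(
            name="projects/my-project/uptimeCheckConfigs/my-check",
            display_name="Checkout page (renamed)",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    updated = client.update_uptime_check_config(request=request)
    print(updated.display_name)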
- if 'list_uptime_check_ips' not in self._stubs: - self._stubs['list_uptime_check_ips'] = self.grpc_channel.unary_unary( - '/google.monitoring.v3.UptimeCheckService/ListUptimeCheckIps', - request_serializer=uptime_service.ListUptimeCheckIpsRequest.serialize, - response_deserializer=uptime_service.ListUptimeCheckIpsResponse.deserialize, - ) - return self._stubs['list_uptime_check_ips'] - - -__all__ = ( - 'UptimeCheckServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/__init__.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/__init__.py deleted file mode 100644 index fe3a76e2..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/__init__.py +++ /dev/null @@ -1,240 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .alert import ( - AlertPolicy, -) -from .alert_service import ( - CreateAlertPolicyRequest, - DeleteAlertPolicyRequest, - GetAlertPolicyRequest, - ListAlertPoliciesRequest, - ListAlertPoliciesResponse, - UpdateAlertPolicyRequest, -) -from .common import ( - Aggregation, - TimeInterval, - TypedValue, - ComparisonType, -) -from .dropped_labels import ( - DroppedLabels, -) -from .group import ( - Group, -) -from .group_service import ( - CreateGroupRequest, - DeleteGroupRequest, - GetGroupRequest, - ListGroupMembersRequest, - ListGroupMembersResponse, - ListGroupsRequest, - ListGroupsResponse, - UpdateGroupRequest, -) -from .metric import ( - LabelValue, - Point, - QueryError, - TextLocator, - TimeSeries, - TimeSeriesData, - TimeSeriesDescriptor, -) -from .metric_service import ( - CreateMetricDescriptorRequest, - CreateTimeSeriesError, - CreateTimeSeriesRequest, - CreateTimeSeriesSummary, - DeleteMetricDescriptorRequest, - GetMetricDescriptorRequest, - GetMonitoredResourceDescriptorRequest, - ListMetricDescriptorsRequest, - ListMetricDescriptorsResponse, - ListMonitoredResourceDescriptorsRequest, - ListMonitoredResourceDescriptorsResponse, - ListTimeSeriesRequest, - ListTimeSeriesResponse, - QueryErrorList, - QueryTimeSeriesRequest, - QueryTimeSeriesResponse, -) -from .mutation_record import ( - MutationRecord, -) -from .notification import ( - NotificationChannel, - NotificationChannelDescriptor, -) -from .notification_service import ( - CreateNotificationChannelRequest, - DeleteNotificationChannelRequest, - GetNotificationChannelDescriptorRequest, - GetNotificationChannelRequest, - GetNotificationChannelVerificationCodeRequest, - GetNotificationChannelVerificationCodeResponse, - ListNotificationChannelDescriptorsRequest, - ListNotificationChannelDescriptorsResponse, - ListNotificationChannelsRequest, - ListNotificationChannelsResponse, - SendNotificationChannelVerificationCodeRequest, - UpdateNotificationChannelRequest, - VerifyNotificationChannelRequest, -) -from .service import ( - BasicSli, - DistributionCut, - Range, - RequestBasedSli, - Service, - ServiceLevelIndicator, - ServiceLevelObjective, - TimeSeriesRatio, - WindowsBasedSli, -) -from 
.service_service import ( - CreateServiceLevelObjectiveRequest, - CreateServiceRequest, - DeleteServiceLevelObjectiveRequest, - DeleteServiceRequest, - GetServiceLevelObjectiveRequest, - GetServiceRequest, - ListServiceLevelObjectivesRequest, - ListServiceLevelObjectivesResponse, - ListServicesRequest, - ListServicesResponse, - UpdateServiceLevelObjectiveRequest, - UpdateServiceRequest, -) -from .span_context import ( - SpanContext, -) -from .uptime import ( - InternalChecker, - UptimeCheckConfig, - UptimeCheckIp, - GroupResourceType, - UptimeCheckRegion, -) -from .uptime_service import ( - CreateUptimeCheckConfigRequest, - DeleteUptimeCheckConfigRequest, - GetUptimeCheckConfigRequest, - ListUptimeCheckConfigsRequest, - ListUptimeCheckConfigsResponse, - ListUptimeCheckIpsRequest, - ListUptimeCheckIpsResponse, - UpdateUptimeCheckConfigRequest, -) - -__all__ = ( - 'AlertPolicy', - 'CreateAlertPolicyRequest', - 'DeleteAlertPolicyRequest', - 'GetAlertPolicyRequest', - 'ListAlertPoliciesRequest', - 'ListAlertPoliciesResponse', - 'UpdateAlertPolicyRequest', - 'Aggregation', - 'TimeInterval', - 'TypedValue', - 'ComparisonType', - 'DroppedLabels', - 'Group', - 'CreateGroupRequest', - 'DeleteGroupRequest', - 'GetGroupRequest', - 'ListGroupMembersRequest', - 'ListGroupMembersResponse', - 'ListGroupsRequest', - 'ListGroupsResponse', - 'UpdateGroupRequest', - 'LabelValue', - 'Point', - 'QueryError', - 'TextLocator', - 'TimeSeries', - 'TimeSeriesData', - 'TimeSeriesDescriptor', - 'CreateMetricDescriptorRequest', - 'CreateTimeSeriesError', - 'CreateTimeSeriesRequest', - 'CreateTimeSeriesSummary', - 'DeleteMetricDescriptorRequest', - 'GetMetricDescriptorRequest', - 'GetMonitoredResourceDescriptorRequest', - 'ListMetricDescriptorsRequest', - 'ListMetricDescriptorsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'ListTimeSeriesRequest', - 'ListTimeSeriesResponse', - 'QueryErrorList', - 'QueryTimeSeriesRequest', - 'QueryTimeSeriesResponse', - 'MutationRecord', - 'NotificationChannel', - 'NotificationChannelDescriptor', - 'CreateNotificationChannelRequest', - 'DeleteNotificationChannelRequest', - 'GetNotificationChannelDescriptorRequest', - 'GetNotificationChannelRequest', - 'GetNotificationChannelVerificationCodeRequest', - 'GetNotificationChannelVerificationCodeResponse', - 'ListNotificationChannelDescriptorsRequest', - 'ListNotificationChannelDescriptorsResponse', - 'ListNotificationChannelsRequest', - 'ListNotificationChannelsResponse', - 'SendNotificationChannelVerificationCodeRequest', - 'UpdateNotificationChannelRequest', - 'VerifyNotificationChannelRequest', - 'BasicSli', - 'DistributionCut', - 'Range', - 'RequestBasedSli', - 'Service', - 'ServiceLevelIndicator', - 'ServiceLevelObjective', - 'TimeSeriesRatio', - 'WindowsBasedSli', - 'CreateServiceLevelObjectiveRequest', - 'CreateServiceRequest', - 'DeleteServiceLevelObjectiveRequest', - 'DeleteServiceRequest', - 'GetServiceLevelObjectiveRequest', - 'GetServiceRequest', - 'ListServiceLevelObjectivesRequest', - 'ListServiceLevelObjectivesResponse', - 'ListServicesRequest', - 'ListServicesResponse', - 'UpdateServiceLevelObjectiveRequest', - 'UpdateServiceRequest', - 'SpanContext', - 'InternalChecker', - 'UptimeCheckConfig', - 'UptimeCheckIp', - 'GroupResourceType', - 'UptimeCheckRegion', - 'CreateUptimeCheckConfigRequest', - 'DeleteUptimeCheckConfigRequest', - 'GetUptimeCheckConfigRequest', - 'ListUptimeCheckConfigsRequest', - 'ListUptimeCheckConfigsResponse', - 'ListUptimeCheckIpsRequest', - 
'ListUptimeCheckIpsResponse', - 'UpdateUptimeCheckConfigRequest', -) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert.py deleted file mode 100644 index 1e3996e8..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert.py +++ /dev/null @@ -1,654 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.monitoring_v3.types import common -from google.cloud.monitoring_v3.types import mutation_record as gm_mutation_record -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'AlertPolicy', - }, -) - - -class AlertPolicy(proto.Message): - r"""A description of the conditions under which some aspect of your - system is considered to be "unhealthy" and the ways to notify people - or services about this state. For an overview of alert policies, see - `Introduction to - Alerting `__. - - Attributes: - name (str): - Required if the policy exists. The resource name for this - policy. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] - - ``[ALERT_POLICY_ID]`` is assigned by Stackdriver Monitoring - when the policy is created. When calling the - [alertPolicies.create][google.monitoring.v3.AlertPolicyService.CreateAlertPolicy] - method, do not include the ``name`` field in the alerting - policy passed as part of the request. - display_name (str): - A short name or phrase used to identify the - policy in dashboards, notifications, and - incidents. To avoid confusion, don't use the - same display name for multiple policies in the - same project. The name is limited to 512 Unicode - characters. - documentation (google.cloud.monitoring_v3.types.AlertPolicy.Documentation): - Documentation that is included with - notifications and incidents related to this - policy. Best practice is for the documentation - to include information to help responders - understand, mitigate, escalate, and correct the - underlying problems detected by the alerting - policy. Notification channels that have limited - capacity might not show this documentation. - user_labels (Sequence[google.cloud.monitoring_v3.types.AlertPolicy.UserLabelsEntry]): - User-supplied key/value data to be used for organizing and - identifying the ``AlertPolicy`` objects. - - The field can contain up to 64 entries. Each key and value - is limited to 63 Unicode characters or 128 bytes, whichever - is smaller. Labels and values can contain only lowercase - letters, numerals, underscores, and dashes. Keys must begin - with a letter. - conditions (Sequence[google.cloud.monitoring_v3.types.AlertPolicy.Condition]): - A list of conditions for the policy. The conditions are - combined by AND or OR according to the ``combiner`` field. 
- If the combined conditions evaluate to true, then an - incident is created. A policy can have from one to six - conditions. If ``condition_time_series_query_language`` is - present, it must be the only ``condition``. - combiner (google.cloud.monitoring_v3.types.AlertPolicy.ConditionCombinerType): - How to combine the results of multiple conditions to - determine if an incident should be opened. If - ``condition_time_series_query_language`` is present, this - must be ``COMBINE_UNSPECIFIED``. - enabled (google.protobuf.wrappers_pb2.BoolValue): - Whether or not the policy is enabled. On - write, the default interpretation if unset is - that the policy is enabled. On read, clients - should not make any assumption about the state - if it has not been populated. The field should - always be populated on List and Get operations, - unless a field projection has been specified - that strips it out. - validity (google.rpc.status_pb2.Status): - Read-only description of how the alert policy - is invalid. OK if the alert policy is valid. If - not OK, the alert policy will not generate - incidents. - notification_channels (Sequence[str]): - Identifies the notification channels to which notifications - should be sent when incidents are opened or closed or when - new violations occur on an already opened incident. Each - element of this array corresponds to the ``name`` field in - each of the - [``NotificationChannel``][google.monitoring.v3.NotificationChannel] - objects that are returned from the - [``ListNotificationChannels``] - [google.monitoring.v3.NotificationChannelService.ListNotificationChannels] - method. The format of the entries in this field is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] - creation_record (google.cloud.monitoring_v3.types.MutationRecord): - A read-only record of the creation of the - alerting policy. If provided in a call to create - or update, this field will be ignored. - mutation_record (google.cloud.monitoring_v3.types.MutationRecord): - A read-only record of the most recent change - to the alerting policy. If provided in a call to - create or update, this field will be ignored. - alert_strategy (google.cloud.monitoring_v3.types.AlertPolicy.AlertStrategy): - Control over how this alert policy's - notification channels are notified. - """ - class ConditionCombinerType(proto.Enum): - r"""Operators for combining conditions.""" - COMBINE_UNSPECIFIED = 0 - AND = 1 - OR = 2 - AND_WITH_MATCHING_RESOURCE = 3 - - class Documentation(proto.Message): - r"""A content string and a MIME type that describes the content - string's format. - - Attributes: - content (str): - The text of the documentation, interpreted according to - ``mime_type``. The content may not exceed 8,192 Unicode - characters and may not exceed more than 10,240 bytes when - encoded in UTF-8 format, whichever is smaller. - mime_type (str): - The format of the ``content`` field. Presently, only the - value ``"text/markdown"`` is supported. See - `Markdown `__ for - more information. - """ - - content = proto.Field( - proto.STRING, - number=1, - ) - mime_type = proto.Field( - proto.STRING, - number=2, - ) - - class Condition(proto.Message): - r"""A condition is a true/false test that determines when an - alerting policy should open an incident. If a condition - evaluates to true, it signifies that something is wrong. - - Attributes: - name (str): - Required if the condition exists. The unique resource name - for this condition. 
Its format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[POLICY_ID]/conditions/[CONDITION_ID] - - ``[CONDITION_ID]`` is assigned by Stackdriver Monitoring - when the condition is created as part of a new or updated - alerting policy. - - When calling the - [alertPolicies.create][google.monitoring.v3.AlertPolicyService.CreateAlertPolicy] - method, do not include the ``name`` field in the conditions - of the requested alerting policy. Stackdriver Monitoring - creates the condition identifiers and includes them in the - new policy. - - When calling the - [alertPolicies.update][google.monitoring.v3.AlertPolicyService.UpdateAlertPolicy] - method to update a policy, including a condition ``name`` - causes the existing condition to be updated. Conditions - without names are added to the updated policy. Existing - conditions are deleted if they are not updated. - - Best practice is to preserve ``[CONDITION_ID]`` if you make - only small changes, such as those to condition thresholds, - durations, or trigger values. Otherwise, treat the change as - a new condition and let the existing condition be deleted. - display_name (str): - A short name or phrase used to identify the - condition in dashboards, notifications, and - incidents. To avoid confusion, don't use the - same display name for multiple conditions in the - same policy. - condition_threshold (google.cloud.monitoring_v3.types.AlertPolicy.Condition.MetricThreshold): - A condition that compares a time series - against a threshold. - condition_absent (google.cloud.monitoring_v3.types.AlertPolicy.Condition.MetricAbsence): - A condition that checks that a time series - continues to receive new data points. - condition_matched_log (google.cloud.monitoring_v3.types.AlertPolicy.Condition.LogMatch): - A condition that checks for log messages - matching given constraints. If set, no other - conditions can be present. - condition_monitoring_query_language (google.cloud.monitoring_v3.types.AlertPolicy.Condition.MonitoringQueryLanguageCondition): - A condition that uses the Monitoring Query - Language to define alerts. - """ - - class Trigger(proto.Message): - r"""Specifies how many time series must fail a predicate to trigger a - condition. If not specified, then a ``{count: 1}`` trigger is used. - - Attributes: - count (int): - The absolute number of time series that must - fail the predicate for the condition to be - triggered. - percent (float): - The percentage of time series that must fail - the predicate for the condition to be triggered. - """ - - count = proto.Field( - proto.INT32, - number=1, - oneof='type', - ) - percent = proto.Field( - proto.DOUBLE, - number=2, - oneof='type', - ) - - class MetricThreshold(proto.Message): - r"""A condition type that compares a collection of time series - against a threshold. - - Attributes: - filter (str): - Required. A - `filter `__ - that identifies which time series should be compared with - the threshold. - - The filter is similar to the one that is specified in the - ```ListTimeSeries`` - request `__ - (that call is useful to verify the time series that will be - retrieved / processed). The filter must specify the metric - type and the resource type. Optionally, it can specify - resource labels and metric labels. This field must not - exceed 2048 Unicode characters in length. 
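As the description above suggests, the easiest way to validate such a filter is to run it through ``ListTimeSeries`` first and confirm that it selects the intended series. A hedged sketch with an illustrative CPU-utilization filter and a placeholder project::

    import time
    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    now = int(time.time())
    results = client.list_time_series(
        request={
            "name": "projects/my-project",
            "filter": (
                'metric.type = "compute.googleapis.com/instance/cpu/utilization" '
                'AND resource.type = "gce_instance"'
            ),
            "interval": monitoring_v3.TimeInterval(
                {"start_time": {"seconds": now - 1200}, "end_time": {"seconds": now}}
            ),
            "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.HEADERS,
        }
    )
    for series in results:  # one entry per matching time series
        print(series.metric.type, dict(series.resource.labels))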
- aggregations (Sequence[google.cloud.monitoring_v3.types.Aggregation]): - Specifies the alignment of data points in individual time - series as well as how to combine the retrieved time series - together (such as when aggregating multiple streams on each - resource to a single stream for each resource or when - aggregating streams across all members of a group of - resrouces). Multiple aggregations are applied in the order - specified. - - This field is similar to the one in the ```ListTimeSeries`` - request `__. - It is advisable to use the ``ListTimeSeries`` method when - debugging this field. - denominator_filter (str): - A - `filter `__ - that identifies a time series that should be used as the - denominator of a ratio that will be compared with the - threshold. If a ``denominator_filter`` is specified, the - time series specified by the ``filter`` field will be used - as the numerator. - - The filter must specify the metric type and optionally may - contain restrictions on resource type, resource labels, and - metric labels. This field may not exceed 2048 Unicode - characters in length. - denominator_aggregations (Sequence[google.cloud.monitoring_v3.types.Aggregation]): - Specifies the alignment of data points in individual time - series selected by ``denominatorFilter`` as well as how to - combine the retrieved time series together (such as when - aggregating multiple streams on each resource to a single - stream for each resource or when aggregating streams across - all members of a group of resources). - - When computing ratios, the ``aggregations`` and - ``denominator_aggregations`` fields must use the same - alignment period and produce time series that have the same - periodicity and labels. - comparison (google.cloud.monitoring_v3.types.ComparisonType): - The comparison to apply between the time series (indicated - by ``filter`` and ``aggregation``) and the threshold - (indicated by ``threshold_value``). The comparison is - applied on each time series, with the time series on the - left-hand side and the threshold on the right-hand side. - - Only ``COMPARISON_LT`` and ``COMPARISON_GT`` are supported - currently. - threshold_value (float): - A value against which to compare the time - series. - duration (google.protobuf.duration_pb2.Duration): - The amount of time that a time series must violate the - threshold to be considered failing. Currently, only values - that are a multiple of a minute--e.g., 0, 60, 120, or 300 - seconds--are supported. If an invalid value is given, an - error will be returned. When choosing a duration, it is - useful to keep in mind the frequency of the underlying time - series data (which may also be affected by any alignments - specified in the ``aggregations`` field); a good duration is - long enough so that a single outlier does not generate - spurious alerts, but short enough that unhealthy states are - detected and alerted on quickly. - trigger (google.cloud.monitoring_v3.types.AlertPolicy.Condition.Trigger): - The number/percent of time series for which the comparison - must hold in order for the condition to trigger. If - unspecified, then the condition will trigger if the - comparison is true for any of the time series that have been - identified by ``filter`` and ``aggregations``, or by the - ratio, if ``denominator_filter`` and - ``denominator_aggregations`` are specified. 
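Putting those attributes together, a hedged sketch of one metric-threshold condition; the filter, threshold and durations are illustrative only. The resulting object would be placed in ``AlertPolicy.conditions`` alongside a ``combiner``::

    from google.cloud import monitoring_v3
    from google.protobuf import duration_pb2

    condition = monitoring_v3.AlertPolicy.Condition(
        display_name="CPU above 80% for 5 minutes",
        condition_threshold=monitoring_v3.AlertPolicy.Condition.MetricThreshold(
            filter=(
                'metric.type = "compute.googleapis.com/instance/cpu/utilization" '
                'AND resource.type = "gce_instance"'
            ),
            aggregations=[
                monitoring_v3.Aggregation(
                    alignment_period=duration_pb2.Duration(seconds=60),
                    per_series_aligner=monitoring_v3.Aggregation.Aligner.ALIGN_MEAN,
                )
            ],
            comparison=monitoring_v3.ComparisonType.COMPARISON_GT,
            threshold_value=0.8,
            duration=duration_pb2.Duration(seconds=300),
            trigger=monitoring_v3.AlertPolicy.Condition.Trigger(count=1),
        ),
    )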
- """ - - filter = proto.Field( - proto.STRING, - number=2, - ) - aggregations = proto.RepeatedField( - proto.MESSAGE, - number=8, - message=common.Aggregation, - ) - denominator_filter = proto.Field( - proto.STRING, - number=9, - ) - denominator_aggregations = proto.RepeatedField( - proto.MESSAGE, - number=10, - message=common.Aggregation, - ) - comparison = proto.Field( - proto.ENUM, - number=4, - enum=common.ComparisonType, - ) - threshold_value = proto.Field( - proto.DOUBLE, - number=5, - ) - duration = proto.Field( - proto.MESSAGE, - number=6, - message=duration_pb2.Duration, - ) - trigger = proto.Field( - proto.MESSAGE, - number=7, - message='AlertPolicy.Condition.Trigger', - ) - - class MetricAbsence(proto.Message): - r"""A condition type that checks that monitored resources are reporting - data. The configuration defines a metric and a set of monitored - resources. The predicate is considered in violation when a time - series for the specified metric of a monitored resource does not - include any data in the specified ``duration``. - - Attributes: - filter (str): - Required. A - `filter `__ - that identifies which time series should be compared with - the threshold. - - The filter is similar to the one that is specified in the - ```ListTimeSeries`` - request `__ - (that call is useful to verify the time series that will be - retrieved / processed). The filter must specify the metric - type and the resource type. Optionally, it can specify - resource labels and metric labels. This field must not - exceed 2048 Unicode characters in length. - aggregations (Sequence[google.cloud.monitoring_v3.types.Aggregation]): - Specifies the alignment of data points in individual time - series as well as how to combine the retrieved time series - together (such as when aggregating multiple streams on each - resource to a single stream for each resource or when - aggregating streams across all members of a group of - resrouces). Multiple aggregations are applied in the order - specified. - - This field is similar to the one in the ```ListTimeSeries`` - request `__. - It is advisable to use the ``ListTimeSeries`` method when - debugging this field. - duration (google.protobuf.duration_pb2.Duration): - The amount of time that a time series must fail to report - new data to be considered failing. The minimum value of this - field is 120 seconds. Larger values that are a multiple of a - minute--for example, 240 or 300 seconds--are supported. If - an invalid value is given, an error will be returned. The - ``Duration.nanos`` field is ignored. - trigger (google.cloud.monitoring_v3.types.AlertPolicy.Condition.Trigger): - The number/percent of time series for which the comparison - must hold in order for the condition to trigger. If - unspecified, then the condition will trigger if the - comparison is true for any of the time series that have been - identified by ``filter`` and ``aggregations``. - """ - - filter = proto.Field( - proto.STRING, - number=1, - ) - aggregations = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=common.Aggregation, - ) - duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - trigger = proto.Field( - proto.MESSAGE, - number=3, - message='AlertPolicy.Condition.Trigger', - ) - - class LogMatch(proto.Message): - r"""A condition type that checks whether a log message in the `scoping - project `__ - satisfies the given filter. Logs from other projects in the metrics - scope are not evaluated. - - Attributes: - filter (str): - Required. 
A logs-based filter. See `Advanced Logs - Queries `__ for how - this filter should be constructed. - label_extractors (Sequence[google.cloud.monitoring_v3.types.AlertPolicy.Condition.LogMatch.LabelExtractorsEntry]): - Optional. A map from a label key to an extractor expression, - which is used to extract the value for this label key. Each - entry in this map is a specification for how data should be - extracted from log entries that match ``filter``. Each - combination of extracted values is treated as a separate - rule for the purposes of triggering notifications. Label - keys and corresponding values can be used in notifications - generated by this condition. - - Please see `the documentation on logs-based metric - ``valueExtractor``\ s `__ - for syntax and examples. - """ - - filter = proto.Field( - proto.STRING, - number=1, - ) - label_extractors = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - - class MonitoringQueryLanguageCondition(proto.Message): - r"""A condition type that allows alert policies to be defined using - `Monitoring Query - Language `__. - - Attributes: - query (str): - `Monitoring Query - Language `__ query - that outputs a boolean stream. - duration (google.protobuf.duration_pb2.Duration): - The amount of time that a time series must violate the - threshold to be considered failing. Currently, only values - that are a multiple of a minute--e.g., 0, 60, 120, or 300 - seconds--are supported. If an invalid value is given, an - error will be returned. When choosing a duration, it is - useful to keep in mind the frequency of the underlying time - series data (which may also be affected by any alignments - specified in the ``aggregations`` field); a good duration is - long enough so that a single outlier does not generate - spurious alerts, but short enough that unhealthy states are - detected and alerted on quickly. - trigger (google.cloud.monitoring_v3.types.AlertPolicy.Condition.Trigger): - The number/percent of time series for which the comparison - must hold in order for the condition to trigger. If - unspecified, then the condition will trigger if the - comparison is true for any of the time series that have been - identified by ``filter`` and ``aggregations``, or by the - ratio, if ``denominator_filter`` and - ``denominator_aggregations`` are specified. - """ - - query = proto.Field( - proto.STRING, - number=1, - ) - duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - trigger = proto.Field( - proto.MESSAGE, - number=3, - message='AlertPolicy.Condition.Trigger', - ) - - name = proto.Field( - proto.STRING, - number=12, - ) - display_name = proto.Field( - proto.STRING, - number=6, - ) - condition_threshold = proto.Field( - proto.MESSAGE, - number=1, - oneof='condition', - message='AlertPolicy.Condition.MetricThreshold', - ) - condition_absent = proto.Field( - proto.MESSAGE, - number=2, - oneof='condition', - message='AlertPolicy.Condition.MetricAbsence', - ) - condition_matched_log = proto.Field( - proto.MESSAGE, - number=20, - oneof='condition', - message='AlertPolicy.Condition.LogMatch', - ) - condition_monitoring_query_language = proto.Field( - proto.MESSAGE, - number=19, - oneof='condition', - message='AlertPolicy.Condition.MonitoringQueryLanguageCondition', - ) - - class AlertStrategy(proto.Message): - r"""Control over how the notification channels in - ``notification_channels`` are notified when this alert fires. 
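A hedged end-to-end sketch tying the pieces above together for the logs-based alerting this change introduces: a single ``LogMatch`` condition, the required ``NotificationRateLimit``, and the ``CreateAlertPolicy`` call. The log filter and project ID are placeholders::

    from google.cloud import monitoring_v3
    from google.protobuf import duration_pb2

    client = monitoring_v3.AlertPolicyServiceClient()
    policy = monitoring_v3.AlertPolicy(
        display_name="Payment failures in logs",
        combiner=monitoring_v3.AlertPolicy.ConditionCombinerType.OR,
        conditions=[
            monitoring_v3.AlertPolicy.Condition(
                display_name="payment_failed logged",
                condition_matched_log=monitoring_v3.AlertPolicy.Condition.LogMatch(
                    filter='severity>=ERROR AND jsonPayload.event="payment_failed"',
                ),
            )
        ],
        alert_strategy=monitoring_v3.AlertPolicy.AlertStrategy(
            notification_rate_limit=monitoring_v3.AlertPolicy.AlertStrategy.NotificationRateLimit(
                period=duration_pb2.Duration(seconds=1800),  # at most one notification per 30 minutes
            )
        ),
    )
    created = client.create_alert_policy(
        name="projects/my-project",  # parent project, not the policy resource name
        alert_policy=policy,
    )
    print("created:", created.name)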
- - Attributes: - notification_rate_limit (google.cloud.monitoring_v3.types.AlertPolicy.AlertStrategy.NotificationRateLimit): - Required for alert policies with a ``LogMatch`` condition. - - This limit is not implemented for alert policies that are - not log-based. - """ - - class NotificationRateLimit(proto.Message): - r"""Control over the rate of notifications sent to this alert - policy's notification channels. - - Attributes: - period (google.protobuf.duration_pb2.Duration): - Not more than one notification per ``period``. - """ - - period = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - - notification_rate_limit = proto.Field( - proto.MESSAGE, - number=1, - message='AlertPolicy.AlertStrategy.NotificationRateLimit', - ) - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - documentation = proto.Field( - proto.MESSAGE, - number=13, - message=Documentation, - ) - user_labels = proto.MapField( - proto.STRING, - proto.STRING, - number=16, - ) - conditions = proto.RepeatedField( - proto.MESSAGE, - number=12, - message=Condition, - ) - combiner = proto.Field( - proto.ENUM, - number=6, - enum=ConditionCombinerType, - ) - enabled = proto.Field( - proto.MESSAGE, - number=17, - message=wrappers_pb2.BoolValue, - ) - validity = proto.Field( - proto.MESSAGE, - number=18, - message=status_pb2.Status, - ) - notification_channels = proto.RepeatedField( - proto.STRING, - number=14, - ) - creation_record = proto.Field( - proto.MESSAGE, - number=10, - message=gm_mutation_record.MutationRecord, - ) - mutation_record = proto.Field( - proto.MESSAGE, - number=11, - message=gm_mutation_record.MutationRecord, - ) - alert_strategy = proto.Field( - proto.MESSAGE, - number=21, - message=AlertStrategy, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert_service.py deleted file mode 100644 index 344e0a38..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/alert_service.py +++ /dev/null @@ -1,257 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.monitoring_v3.types import alert -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'CreateAlertPolicyRequest', - 'GetAlertPolicyRequest', - 'ListAlertPoliciesRequest', - 'ListAlertPoliciesResponse', - 'UpdateAlertPolicyRequest', - 'DeleteAlertPolicyRequest', - }, -) - - -class CreateAlertPolicyRequest(proto.Message): - r"""The protocol for the ``CreateAlertPolicy`` request. - Attributes: - name (str): - Required. The - `project `__ - in which to create the alerting policy. 
The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - Note that this field names the parent container in which the - alerting policy will be written, not the name of the created - policy. \|name\| must be a host project of a workspace, - otherwise INVALID_ARGUMENT error will return. The alerting - policy that is returned will have a name that contains a - normalized representation of this name as a prefix but adds - a suffix of the form ``/alertPolicies/[ALERT_POLICY_ID]``, - identifying the policy in the container. - alert_policy (google.cloud.monitoring_v3.types.AlertPolicy): - Required. The requested alerting policy. You should omit the - ``name`` field in this policy. The name will be returned in - the new policy, including a new ``[ALERT_POLICY_ID]`` value. - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - alert_policy = proto.Field( - proto.MESSAGE, - number=2, - message=alert.AlertPolicy, - ) - - -class GetAlertPolicyRequest(proto.Message): - r"""The protocol for the ``GetAlertPolicy`` request. - Attributes: - name (str): - Required. The alerting policy to retrieve. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - - -class ListAlertPoliciesRequest(proto.Message): - r"""The protocol for the ``ListAlertPolicies`` request. - Attributes: - name (str): - Required. The - `project `__ - whose alert policies are to be listed. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - Note that this field names the parent container in which the - alerting policies to be listed are stored. To retrieve a - single alerting policy by name, use the - [GetAlertPolicy][google.monitoring.v3.AlertPolicyService.GetAlertPolicy] - operation, instead. - filter (str): - If provided, this field specifies the criteria that must be - met by alert policies to be included in the response. - - For more details, see `sorting and - filtering `__. - order_by (str): - A comma-separated list of fields by which to sort the - result. Supports the same set of field references as the - ``filter`` field. Entries can be prefixed with a minus sign - to sort by the field in descending order. - - For more details, see `sorting and - filtering `__. - page_size (int): - The maximum number of results to return in a - single response. - page_token (str): - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return more - results from the previous method call. - """ - - name = proto.Field( - proto.STRING, - number=4, - ) - filter = proto.Field( - proto.STRING, - number=5, - ) - order_by = proto.Field( - proto.STRING, - number=6, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class ListAlertPoliciesResponse(proto.Message): - r"""The protocol for the ``ListAlertPolicies`` response. - Attributes: - alert_policies (Sequence[google.cloud.monitoring_v3.types.AlertPolicy]): - The returned alert policies. - next_page_token (str): - If there might be more results than were returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``page_token`` in the next call - to this method. - total_size (int): - The total number of alert policies in all - pages. This number is only an estimate, and may - change in subsequent pages. 
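The ``page_token``/``next_page_token`` handshake described here is normally hidden by the generated pager, which issues follow-up requests on demand. A hedged sketch with a placeholder project::

    from google.cloud import monitoring_v3

    client = monitoring_v3.AlertPolicyServiceClient()
    for policy in client.list_alert_policies(name="projects/my-project"):
        # The pager transparently requests the next page whenever
        # next_page_token in the previous response is non-empty.
        print(policy.name, policy.display_name)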
https://aip.dev/158 - """ - - @property - def raw_page(self): - return self - - alert_policies = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=alert.AlertPolicy, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - total_size = proto.Field( - proto.INT32, - number=4, - ) - - -class UpdateAlertPolicyRequest(proto.Message): - r"""The protocol for the ``UpdateAlertPolicy`` request. - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. A list of alerting policy field names. If this - field is not empty, each listed field in the existing - alerting policy is set to the value of the corresponding - field in the supplied policy (``alert_policy``), or to the - field's default value if the field is not in the supplied - alerting policy. Fields not listed retain their previous - value. - - Examples of valid field masks include ``display_name``, - ``documentation``, ``documentation.content``, - ``documentation.mime_type``, ``user_labels``, - ``user_label.nameofkey``, ``enabled``, ``conditions``, - ``combiner``, etc. - - If this field is empty, then the supplied alerting policy - replaces the existing policy. It is the same as deleting the - existing policy and adding the supplied policy, except for - the following: - - - The new policy will have the same ``[ALERT_POLICY_ID]`` - as the former policy. This gives you continuity with the - former policy in your notifications and incidents. - - Conditions in the new policy will keep their former - ``[CONDITION_ID]`` if the supplied condition includes the - ``name`` field with that ``[CONDITION_ID]``. If the - supplied condition omits the ``name`` field, then a new - ``[CONDITION_ID]`` is created. - alert_policy (google.cloud.monitoring_v3.types.AlertPolicy): - Required. The updated alerting policy or the updated values - for the fields listed in ``update_mask``. If ``update_mask`` - is not empty, any fields in this policy that are not in - ``update_mask`` are ignored. - """ - - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - alert_policy = proto.Field( - proto.MESSAGE, - number=3, - message=alert.AlertPolicy, - ) - - -class DeleteAlertPolicyRequest(proto.Message): - r"""The protocol for the ``DeleteAlertPolicy`` request. - Attributes: - name (str): - Required. The alerting policy to delete. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] - - For more information, see - [AlertPolicy][google.monitoring.v3.AlertPolicy]. - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/common.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/common.py deleted file mode 100644 index 6d5c2536..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/common.py +++ /dev/null @@ -1,333 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.api import distribution_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'ComparisonType', - 'TypedValue', - 'TimeInterval', - 'Aggregation', - }, -) - - -class ComparisonType(proto.Enum): - r"""Specifies an ordering relationship on two arguments, called ``left`` - and ``right``. - """ - COMPARISON_UNSPECIFIED = 0 - COMPARISON_GT = 1 - COMPARISON_GE = 2 - COMPARISON_LT = 3 - COMPARISON_LE = 4 - COMPARISON_EQ = 5 - COMPARISON_NE = 6 - - -class TypedValue(proto.Message): - r"""A single strongly-typed value. - Attributes: - bool_value (bool): - A Boolean value: ``true`` or ``false``. - int64_value (int): - A 64-bit integer. Its range is approximately - ±9.2x1018. - double_value (float): - A 64-bit double-precision floating-point - number. Its magnitude is approximately - ±10±300 and it has 16 - significant digits of precision. - string_value (str): - A variable-length string value. - distribution_value (google.api.distribution_pb2.Distribution): - A distribution value. - """ - - bool_value = proto.Field( - proto.BOOL, - number=1, - oneof='value', - ) - int64_value = proto.Field( - proto.INT64, - number=2, - oneof='value', - ) - double_value = proto.Field( - proto.DOUBLE, - number=3, - oneof='value', - ) - string_value = proto.Field( - proto.STRING, - number=4, - oneof='value', - ) - distribution_value = proto.Field( - proto.MESSAGE, - number=5, - oneof='value', - message=distribution_pb2.Distribution, - ) - - -class TimeInterval(proto.Message): - r"""A closed time interval. It extends from the start time to the end - time, and includes both: ``[startTime, endTime]``. Valid time - intervals depend on the - ```MetricKind`` `__ - of the metric value. The end time must not be earlier than the start - time. When writing data points, the start time must not be more than - 25 hours in the past and the end time must not be more than five - minutes in the future. - - - For ``GAUGE`` metrics, the ``startTime`` value is technically - optional; if no value is specified, the start time defaults to - the value of the end time, and the interval represents a single - point in time. If both start and end times are specified, they - must be identical. Such an interval is valid only for ``GAUGE`` - metrics, which are point-in-time measurements. The end time of a - new interval must be at least a millisecond after the end time of - the previous interval. - - - For ``DELTA`` metrics, the start time and end time must specify a - non-zero interval, with subsequent points specifying contiguous - and non-overlapping intervals. For ``DELTA`` metrics, the start - time of the next interval must be at least a millisecond after - the end time of the previous interval. - - - For ``CUMULATIVE`` metrics, the start time and end time must - specify a a non-zero interval, with subsequent points specifying - the same start time and increasing end times, until an event - resets the cumulative value to zero and sets a new start time for - the following points. The new start time must be at least a - millisecond after the end time of the previous interval. - - - The start time of a new interval must be at least a millisecond - after the end time of the previous interval because intervals are - closed. 
If the start time of a new interval is the same as the - end time of the previous interval, then data written at the new - start time could overwrite data written at the previous end time. - - Attributes: - end_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The end of the time interval. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The beginning of the time interval. - The default value for the start time is the end - time. The start time must not be later than the - end time. - """ - - end_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - start_time = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - - -class Aggregation(proto.Message): - r"""Describes how to combine multiple time series to provide a different - view of the data. Aggregation of time series is done in two steps. - First, each time series in the set is *aligned* to the same time - interval boundaries, then the set of time series is optionally - *reduced* in number. - - Alignment consists of applying the ``per_series_aligner`` operation - to each time series after its data has been divided into regular - ``alignment_period`` time intervals. This process takes *all* of the - data points in an alignment period, applies a mathematical - transformation such as averaging, minimum, maximum, delta, etc., and - converts them into a single data point per period. - - Reduction is when the aligned and transformed time series can - optionally be combined, reducing the number of time series through - similar mathematical transformations. Reduction involves applying a - ``cross_series_reducer`` to all the time series, optionally sorting - the time series into subsets with ``group_by_fields``, and applying - the reducer to each subset. - - The raw time series data can contain a huge amount of information - from multiple sources. Alignment and reduction transforms this mass - of data into a more manageable and representative collection of - data, for example "the 95% latency across the average of all tasks - in a cluster". This representative data can be more easily graphed - and comprehended, and the individual time series data is still - available for later drilldown. For more details, see `Filtering and - aggregation `__. - - Attributes: - alignment_period (google.protobuf.duration_pb2.Duration): - The ``alignment_period`` specifies a time interval, in - seconds, that is used to divide the data in all the [time - series][google.monitoring.v3.TimeSeries] into consistent - blocks of time. This will be done before the per-series - aligner can be applied to the data. - - The value must be at least 60 seconds. If a per-series - aligner other than ``ALIGN_NONE`` is specified, this field - is required or an error is returned. If no per-series - aligner is specified, or the aligner ``ALIGN_NONE`` is - specified, then this field is ignored. - - The maximum value of the ``alignment_period`` is 104 weeks - (2 years) for charts, and 90,000 seconds (25 hours) for - alerting policies. - per_series_aligner (google.cloud.monitoring_v3.types.Aggregation.Aligner): - An ``Aligner`` describes how to bring the data points in a - single time series into temporal alignment. Except for - ``ALIGN_NONE``, all alignments cause all the data points in - an ``alignment_period`` to be mathematically grouped - together, resulting in a single data point for each - ``alignment_period`` with end timestamp at the end of the - period. 
- - Not all alignment operations may be applied to all time - series. The valid choices depend on the ``metric_kind`` and - ``value_type`` of the original time series. Alignment can - change the ``metric_kind`` or the ``value_type`` of the time - series. - - Time series data must be aligned in order to perform - cross-time series reduction. If ``cross_series_reducer`` is - specified, then ``per_series_aligner`` must be specified and - not equal to ``ALIGN_NONE`` and ``alignment_period`` must be - specified; otherwise, an error is returned. - cross_series_reducer (google.cloud.monitoring_v3.types.Aggregation.Reducer): - The reduction operation to be used to combine time series - into a single time series, where the value of each data - point in the resulting series is a function of all the - already aligned values in the input time series. - - Not all reducer operations can be applied to all time - series. The valid choices depend on the ``metric_kind`` and - the ``value_type`` of the original time series. Reduction - can yield a time series with a different ``metric_kind`` or - ``value_type`` than the input time series. - - Time series data must first be aligned (see - ``per_series_aligner``) in order to perform cross-time - series reduction. If ``cross_series_reducer`` is specified, - then ``per_series_aligner`` must be specified, and must not - be ``ALIGN_NONE``. An ``alignment_period`` must also be - specified; otherwise, an error is returned. - group_by_fields (Sequence[str]): - The set of fields to preserve when ``cross_series_reducer`` - is specified. The ``group_by_fields`` determine how the time - series are partitioned into subsets prior to applying the - aggregation operation. Each subset contains time series that - have the same value for each of the grouping fields. Each - individual time series is a member of exactly one subset. - The ``cross_series_reducer`` is applied to each subset of - time series. It is not possible to reduce across different - resource types, so this field implicitly contains - ``resource.type``. Fields not specified in - ``group_by_fields`` are aggregated away. If - ``group_by_fields`` is not specified and all the time series - have the same resource type, then the time series are - aggregated into a single output time series. If - ``cross_series_reducer`` is not defined, this field is - ignored. - """ - class Aligner(proto.Enum): - r"""The ``Aligner`` specifies the operation that will be applied to the - data points in each alignment period in a time series. Except for - ``ALIGN_NONE``, which specifies that no operation be applied, each - alignment operation replaces the set of data values in each - alignment period with a single value: the result of applying the - operation to the data values. An aligned time series has a single - data value at the end of each ``alignment_period``. - - An alignment operation can change the data type of the values, too. - For example, if you apply a counting operation to boolean values, - the data ``value_type`` in the original time series is ``BOOLEAN``, - but the ``value_type`` in the aligned result is ``INT64``. 
- """ - ALIGN_NONE = 0 - ALIGN_DELTA = 1 - ALIGN_RATE = 2 - ALIGN_INTERPOLATE = 3 - ALIGN_NEXT_OLDER = 4 - ALIGN_MIN = 10 - ALIGN_MAX = 11 - ALIGN_MEAN = 12 - ALIGN_COUNT = 13 - ALIGN_SUM = 14 - ALIGN_STDDEV = 15 - ALIGN_COUNT_TRUE = 16 - ALIGN_COUNT_FALSE = 24 - ALIGN_FRACTION_TRUE = 17 - ALIGN_PERCENTILE_99 = 18 - ALIGN_PERCENTILE_95 = 19 - ALIGN_PERCENTILE_50 = 20 - ALIGN_PERCENTILE_05 = 21 - ALIGN_PERCENT_CHANGE = 23 - - class Reducer(proto.Enum): - r"""A Reducer operation describes how to aggregate data points - from multiple time series into a single time series, where the - value of each data point in the resulting series is a function - of all the already aligned values in the input time series. - """ - REDUCE_NONE = 0 - REDUCE_MEAN = 1 - REDUCE_MIN = 2 - REDUCE_MAX = 3 - REDUCE_SUM = 4 - REDUCE_STDDEV = 5 - REDUCE_COUNT = 6 - REDUCE_COUNT_TRUE = 7 - REDUCE_COUNT_FALSE = 15 - REDUCE_FRACTION_TRUE = 8 - REDUCE_PERCENTILE_99 = 9 - REDUCE_PERCENTILE_95 = 10 - REDUCE_PERCENTILE_50 = 11 - REDUCE_PERCENTILE_05 = 12 - - alignment_period = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - per_series_aligner = proto.Field( - proto.ENUM, - number=2, - enum=Aligner, - ) - cross_series_reducer = proto.Field( - proto.ENUM, - number=4, - enum=Reducer, - ) - group_by_fields = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/dropped_labels.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/dropped_labels.py deleted file mode 100644 index 10bf595e..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/dropped_labels.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'DroppedLabels', - }, -) - - -class DroppedLabels(proto.Message): - r"""A set of (label, value) pairs that were removed from a - Distribution time series during aggregation and then added as an - attachment to a Distribution.Exemplar. - - The full label set for the exemplars is constructed by using the - dropped pairs in combination with the label values that remain - on the aggregated Distribution time series. The constructed full - label set can be used to identify the specific entity, such as - the instance or job, which might be contributing to a long-tail. - However, with dropped labels, the storage requirements are - reduced because only the aggregated distribution values for a - large group of time series are stored. - - Note that there are no guarantees on ordering of the labels from - exemplar-to-exemplar and from distribution-to-distribution in - the same stream, and there may be duplicates. It is up to - clients to resolve any ambiguities. 
- - Attributes: - label (Sequence[google.cloud.monitoring_v3.types.DroppedLabels.LabelEntry]): - Map from label to its value, for all labels - dropped in any aggregation. - """ - - label = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group.py deleted file mode 100644 index 7032d8c0..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'Group', - }, -) - - -class Group(proto.Message): - r"""The description of a dynamic collection of monitored resources. Each - group has a filter that is matched against monitored resources and - their associated metadata. If a group's filter matches an available - monitored resource, then that resource is a member of that group. - Groups can contain any number of monitored resources, and each - monitored resource can be a member of any number of groups. - - Groups can be nested in parent-child hierarchies. The ``parentName`` - field identifies an optional parent for each group. If a group has a - parent, then the only monitored resources available to be matched by - the group's filter are the resources contained in the parent group. - In other words, a group contains the monitored resources that match - its filter and the filters of all the group's ancestors. A group - without a parent can contain any monitored resource. - - For example, consider an infrastructure running a set of instances - with two user-defined tags: ``"environment"`` and ``"role"``. A - parent group has a filter, ``environment="production"``. A child of - that parent group has a filter, ``role="transcoder"``. The parent - group contains all instances in the production environment, - regardless of their roles. The child group contains instances that - have the transcoder role *and* are in the production environment. - - The monitored resources contained in a group can change at any - moment, depending on what resources exist and what filters are - associated with the group and its ancestors. - - Attributes: - name (str): - Output only. The name of this group. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - When creating a group, this field is ignored and a new name - is created consisting of the project specified in the call - to ``CreateGroup`` and a unique ``[GROUP_ID]`` that is - generated automatically. - display_name (str): - A user-assigned name for this group, used - only for display purposes. - parent_name (str): - The name of the group's parent, if it has one. 
The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - For groups with no parent, ``parent_name`` is the empty - string, ``""``. - filter (str): - The filter used to determine which monitored - resources belong to this group. - is_cluster (bool): - If true, the members of this group are - considered to be a cluster. The system can - perform additional analysis on groups that are - clusters. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - parent_name = proto.Field( - proto.STRING, - number=3, - ) - filter = proto.Field( - proto.STRING, - number=5, - ) - is_cluster = proto.Field( - proto.BOOL, - number=6, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group_service.py deleted file mode 100644 index 0e57590a..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/group_service.py +++ /dev/null @@ -1,346 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.types import common -from google.cloud.monitoring_v3.types import group as gm_group - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'ListGroupsRequest', - 'ListGroupsResponse', - 'GetGroupRequest', - 'CreateGroupRequest', - 'UpdateGroupRequest', - 'DeleteGroupRequest', - 'ListGroupMembersRequest', - 'ListGroupMembersResponse', - }, -) - - -class ListGroupsRequest(proto.Message): - r"""The ``ListGroup`` request. - Attributes: - name (str): - Required. The - `project `__ - whose groups are to be listed. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - children_of_group (str): - A group name. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - Returns groups whose ``parent_name`` field contains the - group name. If no groups have this parent, the results are - empty. - ancestors_of_group (str): - A group name. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - Returns groups that are ancestors of the specified group. - The groups are returned in order, starting with the - immediate parent and ending with the most distant ancestor. - If the specified group has no immediate parent, the results - are empty. - descendants_of_group (str): - A group name. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - - Returns the descendants of the specified group. This is a - superset of the results returned by the - ``children_of_group`` filter, and includes - children-of-children, and so forth. - page_size (int): - A positive number that is the maximum number - of results to return. 
- page_token (str): - If this field is not empty then it must contain the - ``next_page_token`` value returned by a previous call to - this method. Using this field causes the method to return - additional results from the previous method call. - """ - - name = proto.Field( - proto.STRING, - number=7, - ) - children_of_group = proto.Field( - proto.STRING, - number=2, - oneof='filter', - ) - ancestors_of_group = proto.Field( - proto.STRING, - number=3, - oneof='filter', - ) - descendants_of_group = proto.Field( - proto.STRING, - number=4, - oneof='filter', - ) - page_size = proto.Field( - proto.INT32, - number=5, - ) - page_token = proto.Field( - proto.STRING, - number=6, - ) - - -class ListGroupsResponse(proto.Message): - r"""The ``ListGroups`` response. - Attributes: - group (Sequence[google.cloud.monitoring_v3.types.Group]): - The groups that match the specified filters. - next_page_token (str): - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``page_token`` in the next call - to this method. - """ - - @property - def raw_page(self): - return self - - group = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gm_group.Group, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetGroupRequest(proto.Message): - r"""The ``GetGroup`` request. - Attributes: - name (str): - Required. The group to retrieve. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - - -class CreateGroupRequest(proto.Message): - r"""The ``CreateGroup`` request. - Attributes: - name (str): - Required. The - `project `__ - in which to create the group. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - group (google.cloud.monitoring_v3.types.Group): - Required. A group definition. It is an error to define the - ``name`` field because the system assigns the name. - validate_only (bool): - If true, validate this request but do not - create the group. - """ - - name = proto.Field( - proto.STRING, - number=4, - ) - group = proto.Field( - proto.MESSAGE, - number=2, - message=gm_group.Group, - ) - validate_only = proto.Field( - proto.BOOL, - number=3, - ) - - -class UpdateGroupRequest(proto.Message): - r"""The ``UpdateGroup`` request. - Attributes: - group (google.cloud.monitoring_v3.types.Group): - Required. The new definition of the group. All fields of the - existing group, excepting ``name``, are replaced with the - corresponding fields of this group. - validate_only (bool): - If true, validate this request but do not - update the existing group. - """ - - group = proto.Field( - proto.MESSAGE, - number=2, - message=gm_group.Group, - ) - validate_only = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteGroupRequest(proto.Message): - r"""The ``DeleteGroup`` request. The default behavior is to be able to - delete a single group without any descendants. - - Attributes: - name (str): - Required. The group to delete. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - recursive (bool): - If this field is true, then the request means - to delete a group with all its descendants. - Otherwise, the request means to delete a group - only when it has no descendants. The default - value is false. 
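As a hedged sketch of the ``CreateGroup`` flow documented above, using the generated ``GroupServiceClient``; the project ID, display name, and group filter are placeholders:

```python
from google.cloud import monitoring_v3

client = monitoring_v3.GroupServiceClient()
project_id = "my-project"  # placeholder

# Illustrative group filter; group filters match monitored resources,
# here by resource metadata name prefix.
group = monitoring_v3.Group(
    display_name="Frontend instances",
    filter='resource.metadata.name = starts_with("frontend-")',
)

created = client.create_group(
    request={"name": f"projects/{project_id}", "group": group}
)
print(created.name)
```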
- """ - - name = proto.Field( - proto.STRING, - number=3, - ) - recursive = proto.Field( - proto.BOOL, - number=4, - ) - - -class ListGroupMembersRequest(proto.Message): - r"""The ``ListGroupMembers`` request. - Attributes: - name (str): - Required. The group whose members are listed. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] - page_size (int): - A positive number that is the maximum number - of results to return. - page_token (str): - If this field is not empty then it must contain the - ``next_page_token`` value returned by a previous call to - this method. Using this field causes the method to return - additional results from the previous method call. - filter (str): - An optional `list - filter `__ - describing the members to be returned. The filter may - reference the type, labels, and metadata of monitored - resources that comprise the group. For example, to return - only resources representing Compute Engine VM instances, use - this filter: - - :: - - `resource.type = "gce_instance"` - interval (google.cloud.monitoring_v3.types.TimeInterval): - An optional time interval for which results - should be returned. Only members that were part - of the group during the specified interval are - included in the response. If no interval is - provided then the group membership over the last - minute is returned. - """ - - name = proto.Field( - proto.STRING, - number=7, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - filter = proto.Field( - proto.STRING, - number=5, - ) - interval = proto.Field( - proto.MESSAGE, - number=6, - message=common.TimeInterval, - ) - - -class ListGroupMembersResponse(proto.Message): - r"""The ``ListGroupMembers`` response. - Attributes: - members (Sequence[google.api.monitored_resource_pb2.MonitoredResource]): - A set of monitored resources in the group. - next_page_token (str): - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``page_token`` in the next call - to this method. - total_size (int): - The total number of elements matching this - request. - """ - - @property - def raw_page(self): - return self - - members = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=monitored_resource_pb2.MonitoredResource, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - total_size = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric.py deleted file mode 100644 index 28baa973..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric.py +++ /dev/null @@ -1,417 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import proto # type: ignore - -from google.api import label_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.types import common - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'Point', - 'TimeSeries', - 'TimeSeriesDescriptor', - 'TimeSeriesData', - 'LabelValue', - 'QueryError', - 'TextLocator', - }, -) - - -class Point(proto.Message): - r"""A single data point in a time series. - Attributes: - interval (google.cloud.monitoring_v3.types.TimeInterval): - The time interval to which the data point applies. For - ``GAUGE`` metrics, the start time is optional, but if it is - supplied, it must equal the end time. For ``DELTA`` metrics, - the start and end time should specify a non-zero interval, - with subsequent points specifying contiguous and - non-overlapping intervals. For ``CUMULATIVE`` metrics, the - start and end time should specify a non-zero interval, with - subsequent points specifying the same start time and - increasing end times, until an event resets the cumulative - value to zero and sets a new start time for the following - points. - value (google.cloud.monitoring_v3.types.TypedValue): - The value of the data point. - """ - - interval = proto.Field( - proto.MESSAGE, - number=1, - message=common.TimeInterval, - ) - value = proto.Field( - proto.MESSAGE, - number=2, - message=common.TypedValue, - ) - - -class TimeSeries(proto.Message): - r"""A collection of data points that describes the time-varying - values of a metric. A time series is identified by a combination - of a fully-specified monitored resource and a fully-specified - metric. This type is used for both listing and creating time - series. - - Attributes: - metric (google.api.metric_pb2.Metric): - The associated metric. A fully-specified - metric used to identify the time series. - resource (google.api.monitored_resource_pb2.MonitoredResource): - The associated monitored resource. Custom metrics can use - only certain monitored resource types in their time series - data. For more information, see `Monitored resources for - custom - metrics `__. - metadata (google.api.monitored_resource_pb2.MonitoredResourceMetadata): - Output only. The associated monitored - resource metadata. When reading a time series, - this field will include metadata labels that are - explicitly named in the reduction. When creating - a time series, this field is ignored. - metric_kind (google.api.metric_pb2.MetricKind): - The metric kind of the time series. When listing time - series, this metric kind might be different from the metric - kind of the associated metric if this time series is an - alignment or reduction of other time series. - - When creating a time series, this field is optional. If - present, it must be the same as the metric kind of the - associated metric. If the associated metric's descriptor - must be auto-created, then this field specifies the metric - kind of the new descriptor and must be either ``GAUGE`` (the - default) or ``CUMULATIVE``. - value_type (google.api.metric_pb2.ValueType): - The value type of the time series. When listing time series, - this value type might be different from the value type of - the associated metric if this time series is an alignment or - reduction of other time series. - - When creating a time series, this field is optional. If - present, it must be the same as the type of the data in the - ``points`` field. 
- points (Sequence[google.cloud.monitoring_v3.types.Point]): - The data points of this time series. When listing time - series, points are returned in reverse time order. - - When creating a time series, this field must contain exactly - one point and the point's type must be the same as the value - type of the associated metric. If the associated metric's - descriptor must be auto-created, then the value type of the - descriptor is determined by the point's type, which must be - ``BOOL``, ``INT64``, ``DOUBLE``, or ``DISTRIBUTION``. - unit (str): - The units in which the metric value is reported. It is only - applicable if the ``value_type`` is ``INT64``, ``DOUBLE``, - or ``DISTRIBUTION``. The ``unit`` defines the representation - of the stored metric values. - """ - - metric = proto.Field( - proto.MESSAGE, - number=1, - message=metric_pb2.Metric, - ) - resource = proto.Field( - proto.MESSAGE, - number=2, - message=monitored_resource_pb2.MonitoredResource, - ) - metadata = proto.Field( - proto.MESSAGE, - number=7, - message=monitored_resource_pb2.MonitoredResourceMetadata, - ) - metric_kind = proto.Field( - proto.ENUM, - number=3, - enum=metric_pb2.MetricDescriptor.MetricKind, - ) - value_type = proto.Field( - proto.ENUM, - number=4, - enum=metric_pb2.MetricDescriptor.ValueType, - ) - points = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='Point', - ) - unit = proto.Field( - proto.STRING, - number=8, - ) - - -class TimeSeriesDescriptor(proto.Message): - r"""A descriptor for the labels and points in a time series. - Attributes: - label_descriptors (Sequence[google.api.label_pb2.LabelDescriptor]): - Descriptors for the labels. - point_descriptors (Sequence[google.cloud.monitoring_v3.types.TimeSeriesDescriptor.ValueDescriptor]): - Descriptors for the point data value columns. - """ - - class ValueDescriptor(proto.Message): - r"""A descriptor for the value columns in a data point. - Attributes: - key (str): - The value key. - value_type (google.api.metric_pb2.ValueType): - The value type. - metric_kind (google.api.metric_pb2.MetricKind): - The value stream kind. - unit (str): - The unit in which ``time_series`` point values are reported. - ``unit`` follows the UCUM format for units as seen in - https://unitsofmeasure.org/ucum.html. ``unit`` is only valid - if ``value_type`` is INTEGER, DOUBLE, DISTRIBUTION. - """ - - key = proto.Field( - proto.STRING, - number=1, - ) - value_type = proto.Field( - proto.ENUM, - number=2, - enum=metric_pb2.MetricDescriptor.ValueType, - ) - metric_kind = proto.Field( - proto.ENUM, - number=3, - enum=metric_pb2.MetricDescriptor.MetricKind, - ) - unit = proto.Field( - proto.STRING, - number=4, - ) - - label_descriptors = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=label_pb2.LabelDescriptor, - ) - point_descriptors = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=ValueDescriptor, - ) - - -class TimeSeriesData(proto.Message): - r"""Represents the values of a time series associated with a - TimeSeriesDescriptor. - - Attributes: - label_values (Sequence[google.cloud.monitoring_v3.types.LabelValue]): - The values of the labels in the time series identifier, - given in the same order as the ``label_descriptors`` field - of the TimeSeriesDescriptor associated with this object. - Each value must have a value of the type given in the - corresponding entry of ``label_descriptors``. - point_data (Sequence[google.cloud.monitoring_v3.types.TimeSeriesData.PointData]): - The points in the time series. 
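A sketch of assembling the ``TimeSeries``/``Point`` pair described above (the write call itself is shown further below, with the ``CreateTimeSeries`` request); the metric type, resource labels, and values are placeholders:

```python
from google.cloud import monitoring_v3

# A GAUGE point on a custom metric, attached to a gce_instance resource.
series = monitoring_v3.TimeSeries()
series.metric.type = "custom.googleapis.com/my_metric"  # placeholder metric type
series.resource.type = "gce_instance"
series.resource.labels["instance_id"] = "1234567890123456789"
series.resource.labels["zone"] = "us-central1-f"

interval = monitoring_v3.TimeInterval({"end_time": {"seconds": 1630000000}})
point = monitoring_v3.Point({"interval": interval, "value": {"double_value": 3.14}})

# When creating a time series, exactly one point is supplied per series.
series.points = [point]
```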
- """ - - class PointData(proto.Message): - r"""A point's value columns and time interval. Each point has one or - more point values corresponding to the entries in - ``point_descriptors`` field in the TimeSeriesDescriptor associated - with this object. - - Attributes: - values (Sequence[google.cloud.monitoring_v3.types.TypedValue]): - The values that make up the point. - time_interval (google.cloud.monitoring_v3.types.TimeInterval): - The time interval associated with the point. - """ - - values = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=common.TypedValue, - ) - time_interval = proto.Field( - proto.MESSAGE, - number=2, - message=common.TimeInterval, - ) - - label_values = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='LabelValue', - ) - point_data = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=PointData, - ) - - -class LabelValue(proto.Message): - r"""A label value. - Attributes: - bool_value (bool): - A bool label value. - int64_value (int): - An int64 label value. - string_value (str): - A string label value. - """ - - bool_value = proto.Field( - proto.BOOL, - number=1, - oneof='value', - ) - int64_value = proto.Field( - proto.INT64, - number=2, - oneof='value', - ) - string_value = proto.Field( - proto.STRING, - number=3, - oneof='value', - ) - - -class QueryError(proto.Message): - r"""An error associated with a query in the time series query - language format. - - Attributes: - locator (google.cloud.monitoring_v3.types.TextLocator): - The location of the time series query - language text that this error applies to. - message (str): - The error message. - """ - - locator = proto.Field( - proto.MESSAGE, - number=1, - message='TextLocator', - ) - message = proto.Field( - proto.STRING, - number=2, - ) - - -class TextLocator(proto.Message): - r"""A locator for text. Indicates a particular part of the text of a - request or of an object referenced in the request. - - For example, suppose the request field ``text`` contains: - - text: "The quick brown fox jumps over the lazy dog." - - Then the locator: - - source: "text" start_position { line: 1 column: 17 } end_position { - line: 1 column: 19 } - - refers to the part of the text: "fox". - - Attributes: - source (str): - The source of the text. The source may be a field in the - request, in which case its format is the format of the - google.rpc.BadRequest.FieldViolation.field field in - https://cloud.google.com/apis/design/errors#error_details. - It may also be be a source other than the request field - (e.g. a macro definition referenced in the text of the - query), in which case this is the name of the source (e.g. - the macro name). - start_position (google.cloud.monitoring_v3.types.TextLocator.Position): - The position of the first byte within the - text. - end_position (google.cloud.monitoring_v3.types.TextLocator.Position): - The position of the last byte within the - text. - nested_locator (google.cloud.monitoring_v3.types.TextLocator): - If ``source``, ``start_position``, and ``end_position`` - describe a call on some object (e.g. a macro in the time - series query language text) and a location is to be - designated in that object's text, ``nested_locator`` - identifies the location within that object. - nesting_reason (str): - When ``nested_locator`` is set, this field gives the reason - for the nesting. Usually, the reason is a macro invocation. 
- In that case, the macro name (including the leading '@') - signals the location of the macro call in the text and a - macro argument name (including the leading '$') signals the - location of the macro argument inside the macro body that - got substituted away. - """ - - class Position(proto.Message): - r"""The position of a byte within the text. - Attributes: - line (int): - The line, starting with 1, where the byte is - positioned. - column (int): - The column within the line, starting with 1, - where the byte is positioned. This is a byte - index even though the text is UTF-8. - """ - - line = proto.Field( - proto.INT32, - number=1, - ) - column = proto.Field( - proto.INT32, - number=2, - ) - - source = proto.Field( - proto.STRING, - number=1, - ) - start_position = proto.Field( - proto.MESSAGE, - number=2, - message=Position, - ) - end_position = proto.Field( - proto.MESSAGE, - number=3, - message=Position, - ) - nested_locator = proto.Field( - proto.MESSAGE, - number=4, - message='TextLocator', - ) - nesting_reason = proto.Field( - proto.STRING, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric_service.py deleted file mode 100644 index 83db5344..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/metric_service.py +++ /dev/null @@ -1,665 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.api import metric_pb2 # type: ignore -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.monitoring_v3.types import common -from google.cloud.monitoring_v3.types import metric as gm_metric -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'GetMonitoredResourceDescriptorRequest', - 'ListMetricDescriptorsRequest', - 'ListMetricDescriptorsResponse', - 'GetMetricDescriptorRequest', - 'CreateMetricDescriptorRequest', - 'DeleteMetricDescriptorRequest', - 'ListTimeSeriesRequest', - 'ListTimeSeriesResponse', - 'CreateTimeSeriesRequest', - 'CreateTimeSeriesError', - 'CreateTimeSeriesSummary', - 'QueryTimeSeriesRequest', - 'QueryTimeSeriesResponse', - 'QueryErrorList', - }, -) - - -class ListMonitoredResourceDescriptorsRequest(proto.Message): - r"""The ``ListMonitoredResourceDescriptors`` request. - Attributes: - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - filter (str): - An optional - `filter `__ - describing the descriptors to be returned. The filter can - reference the descriptor's type and labels. 
For example, the - following filter returns only Google Compute Engine - descriptors that have an ``id`` label: - - :: - - resource.type = starts_with("gce_") AND resource.label:id - page_size (int): - A positive number that is the maximum number - of results to return. - page_token (str): - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """ - - name = proto.Field( - proto.STRING, - number=5, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListMonitoredResourceDescriptorsResponse(proto.Message): - r"""The ``ListMonitoredResourceDescriptors`` response. - Attributes: - resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): - The monitored resource descriptors that are available to - this project and that match ``filter``, if present. - next_page_token (str): - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``page_token`` in the next call - to this method. - """ - - @property - def raw_page(self): - return self - - resource_descriptors = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=monitored_resource_pb2.MonitoredResourceDescriptor, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetMonitoredResourceDescriptorRequest(proto.Message): - r"""The ``GetMonitoredResourceDescriptor`` request. - Attributes: - name (str): - Required. The monitored resource descriptor to get. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/monitoredResourceDescriptors/[RESOURCE_TYPE] - - The ``[RESOURCE_TYPE]`` is a predefined type, such as - ``cloudsql_database``. - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - - -class ListMetricDescriptorsRequest(proto.Message): - r"""The ``ListMetricDescriptors`` request. - Attributes: - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - filter (str): - If this field is empty, all custom and system-defined metric - descriptors are returned. Otherwise, the - `filter `__ - specifies which metric descriptors are to be returned. For - example, the following filter matches all `custom - metrics `__: - - :: - - metric.type = starts_with("custom.googleapis.com/") - page_size (int): - A positive number that is the maximum number - of results to return. - page_token (str): - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """ - - name = proto.Field( - proto.STRING, - number=5, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListMetricDescriptorsResponse(proto.Message): - r"""The ``ListMetricDescriptors`` response. - Attributes: - metric_descriptors (Sequence[google.api.metric_pb2.MetricDescriptor]): - The metric descriptors that are available to the project and - that match the value of ``filter``, if present. 
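A minimal sketch of ``ListMonitoredResourceDescriptors`` using the generated ``MetricServiceClient`` and a ``starts_with`` filter like the one in the docstring; the project ID is a placeholder:

```python
from google.cloud import monitoring_v3

client = monitoring_v3.MetricServiceClient()
project_id = "my-project"  # placeholder

# Only Compute Engine resource types are returned.
descriptors = client.list_monitored_resource_descriptors(
    request={
        "name": f"projects/{project_id}",
        "filter": 'resource.type = starts_with("gce_")',
    }
)
for descriptor in descriptors:
    print(descriptor.type, descriptor.display_name)
```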
- next_page_token (str): - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``page_token`` in the next call - to this method. - """ - - @property - def raw_page(self): - return self - - metric_descriptors = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=metric_pb2.MetricDescriptor, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetMetricDescriptorRequest(proto.Message): - r"""The ``GetMetricDescriptor`` request. - Attributes: - name (str): - Required. The metric descriptor on which to execute the - request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] - - An example value of ``[METRIC_ID]`` is - ``"compute.googleapis.com/instance/disk/read_bytes_count"``. - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - - -class CreateMetricDescriptorRequest(proto.Message): - r"""The ``CreateMetricDescriptor`` request. - Attributes: - name (str): - Required. The - `project `__ - on which to execute the request. The format is: 4 - projects/[PROJECT_ID_OR_NUMBER] - metric_descriptor (google.api.metric_pb2.MetricDescriptor): - Required. The new `custom - metric `__ - descriptor. - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - metric_descriptor = proto.Field( - proto.MESSAGE, - number=2, - message=metric_pb2.MetricDescriptor, - ) - - -class DeleteMetricDescriptorRequest(proto.Message): - r"""The ``DeleteMetricDescriptor`` request. - Attributes: - name (str): - Required. The metric descriptor on which to execute the - request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/metricDescriptors/[METRIC_ID] - - An example of ``[METRIC_ID]`` is: - ``"custom.googleapis.com/my_test_metric"``. - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - - -class ListTimeSeriesRequest(proto.Message): - r"""The ``ListTimeSeries`` request. - Attributes: - name (str): - Required. The - `project `__, - organization or folder on which to execute the request. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - organizations/[ORGANIZATION_ID] - folders/[FOLDER_ID] - filter (str): - Required. A `monitoring - filter `__ - that specifies which time series should be returned. The - filter must specify a single metric type, and can - additionally specify metric labels and other information. - For example: - - :: - - metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND - metric.labels.instance_name = "my-instance-name". - interval (google.cloud.monitoring_v3.types.TimeInterval): - Required. The time interval for which results - should be returned. Only time series that - contain data points in the specified interval - are included in the response. - aggregation (google.cloud.monitoring_v3.types.Aggregation): - Specifies the alignment of data points in individual time - series as well as how to combine the retrieved time series - across specified labels. - - By default (if no ``aggregation`` is explicitly specified), - the raw time series data is returned. - secondary_aggregation (google.cloud.monitoring_v3.types.Aggregation): - Apply a second aggregation after ``aggregation`` is applied. - May only be specified if ``aggregation`` is specified. - order_by (str): - Unsupported: must be left blank. The points - in each time series are currently returned in - reverse time order (most recent to oldest). 
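A sketch of ``CreateMetricDescriptor`` for a custom gauge metric, assuming the generated ``MetricServiceClient`` and the ``google.api.metric_pb2`` types imported by this module; the metric type and project ID are placeholders:

```python
from google.api import metric_pb2 as ga_metric
from google.cloud import monitoring_v3

client = monitoring_v3.MetricServiceClient()
project_id = "my-project"  # placeholder

descriptor = ga_metric.MetricDescriptor()
descriptor.type = "custom.googleapis.com/my_test_metric"  # placeholder metric type
descriptor.metric_kind = ga_metric.MetricDescriptor.MetricKind.GAUGE
descriptor.value_type = ga_metric.MetricDescriptor.ValueType.DOUBLE
descriptor.description = "An example gauge metric."

created = client.create_metric_descriptor(
    name=f"projects/{project_id}", metric_descriptor=descriptor
)
print(created.name)
```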
- view (google.cloud.monitoring_v3.types.ListTimeSeriesRequest.TimeSeriesView): - Required. Specifies which information is - returned about the time series. - page_size (int): - A positive number that is the maximum number of results to - return. If ``page_size`` is empty or more than 100,000 - results, the effective ``page_size`` is 100,000 results. If - ``view`` is set to ``FULL``, this is the maximum number of - ``Points`` returned. If ``view`` is set to ``HEADERS``, this - is the maximum number of ``TimeSeries`` returned. - page_token (str): - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """ - class TimeSeriesView(proto.Enum): - r"""Controls which fields are returned by ``ListTimeSeries``.""" - FULL = 0 - HEADERS = 1 - - name = proto.Field( - proto.STRING, - number=10, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - interval = proto.Field( - proto.MESSAGE, - number=4, - message=common.TimeInterval, - ) - aggregation = proto.Field( - proto.MESSAGE, - number=5, - message=common.Aggregation, - ) - secondary_aggregation = proto.Field( - proto.MESSAGE, - number=11, - message=common.Aggregation, - ) - order_by = proto.Field( - proto.STRING, - number=6, - ) - view = proto.Field( - proto.ENUM, - number=7, - enum=TimeSeriesView, - ) - page_size = proto.Field( - proto.INT32, - number=8, - ) - page_token = proto.Field( - proto.STRING, - number=9, - ) - - -class ListTimeSeriesResponse(proto.Message): - r"""The ``ListTimeSeries`` response. - Attributes: - time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): - One or more time series that match the filter - included in the request. - next_page_token (str): - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``page_token`` in the next call - to this method. - execution_errors (Sequence[google.rpc.status_pb2.Status]): - Query execution errors that may have caused - the time series data returned to be incomplete. - unit (str): - The unit in which all ``time_series`` point values are - reported. ``unit`` follows the UCUM format for units as seen - in https://unitsofmeasure.org/ucum.html. If different - ``time_series`` have different units (for example, because - they come from different metric types, or a unit is absent), - then ``unit`` will be "{not_a_unit}". - """ - - @property - def raw_page(self): - return self - - time_series = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gm_metric.TimeSeries, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - execution_errors = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=status_pb2.Status, - ) - unit = proto.Field( - proto.STRING, - number=5, - ) - - -class CreateTimeSeriesRequest(proto.Message): - r"""The ``CreateTimeSeries`` request. - Attributes: - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): - Required. The new data to be added to a list of time series. - Adds at most one data point to each of several time series. - The new data point must be more recent than any other point - in its time series. 
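A sketch of a ``ListTimeSeries`` call combining the filter, interval, aggregation, and view fields documented above; the project ID, metric filter, and aggregation settings are illustrative:

```python
import time

from google.cloud import monitoring_v3

client = monitoring_v3.MetricServiceClient()
project_id = "my-project"  # placeholder

now = int(time.time())
# The last hour of data.
interval = monitoring_v3.TimeInterval(
    {"start_time": {"seconds": now - 3600}, "end_time": {"seconds": now}}
)
# alignment_period is required because a non-ALIGN_NONE aligner is set.
aggregation = monitoring_v3.Aggregation(
    {
        "alignment_period": {"seconds": 300},
        "per_series_aligner": monitoring_v3.Aggregation.Aligner.ALIGN_MEAN,
    }
)

results = client.list_time_series(
    request={
        "name": f"projects/{project_id}",
        "filter": 'metric.type = "compute.googleapis.com/instance/cpu/utilization"',
        "interval": interval,
        "aggregation": aggregation,
        "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL,
    }
)
for series in results:
    print(series.resource.type, len(series.points))
```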
Each ``TimeSeries`` value must fully - specify a unique time series by supplying all label values - for the metric and the monitored resource. - - The maximum number of ``TimeSeries`` objects per ``Create`` - request is 200. - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - time_series = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gm_metric.TimeSeries, - ) - - -class CreateTimeSeriesError(proto.Message): - r"""DEPRECATED. Used to hold per-time-series error status. - Attributes: - time_series (google.cloud.monitoring_v3.types.TimeSeries): - DEPRECATED. Time series ID that resulted in the ``status`` - error. - status (google.rpc.status_pb2.Status): - DEPRECATED. The status of the requested write operation for - ``time_series``. - """ - - time_series = proto.Field( - proto.MESSAGE, - number=1, - message=gm_metric.TimeSeries, - ) - status = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -class CreateTimeSeriesSummary(proto.Message): - r"""Summary of the result of a failed request to write data to a - time series. - - Attributes: - total_point_count (int): - The number of points in the request. - success_point_count (int): - The number of points that were successfully - written. - errors (Sequence[google.cloud.monitoring_v3.types.CreateTimeSeriesSummary.Error]): - The number of points that failed to be - written. Order is not guaranteed. - """ - - class Error(proto.Message): - r"""Detailed information about an error category. - Attributes: - status (google.rpc.status_pb2.Status): - The status of the requested write operation. - point_count (int): - The number of points that couldn't be written because of - ``status``. - """ - - status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - point_count = proto.Field( - proto.INT32, - number=2, - ) - - total_point_count = proto.Field( - proto.INT32, - number=1, - ) - success_point_count = proto.Field( - proto.INT32, - number=2, - ) - errors = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Error, - ) - - -class QueryTimeSeriesRequest(proto.Message): - r"""The ``QueryTimeSeries`` request. - Attributes: - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - query (str): - Required. The query in the `Monitoring Query - Language `__ - format. The default time zone is in UTC. - page_size (int): - A positive number that is the maximum number of - time_series_data to return. - page_token (str): - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - query = proto.Field( - proto.STRING, - number=7, - ) - page_size = proto.Field( - proto.INT32, - number=9, - ) - page_token = proto.Field( - proto.STRING, - number=10, - ) - - -class QueryTimeSeriesResponse(proto.Message): - r"""The ``QueryTimeSeries`` response. - Attributes: - time_series_descriptor (google.cloud.monitoring_v3.types.TimeSeriesDescriptor): - The descriptor for the time series data. - time_series_data (Sequence[google.cloud.monitoring_v3.types.TimeSeriesData]): - The time series data. - next_page_token (str): - If there are more results than have been returned, then this - field is set to a non-empty value. 
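A self-contained sketch of a ``CreateTimeSeries`` write: one ``TimeSeries`` carrying exactly one point; the project ID, metric type, and resource labels are placeholders:

```python
import time

from google.cloud import monitoring_v3

client = monitoring_v3.MetricServiceClient()
project_id = "my-project"  # placeholder

series = monitoring_v3.TimeSeries()
series.metric.type = "custom.googleapis.com/my_test_metric"  # placeholder metric type
series.resource.type = "global"
series.resource.labels["project_id"] = project_id

now = time.time()
seconds = int(now)
nanos = int((now - seconds) * 10**9)
interval = monitoring_v3.TimeInterval(
    {"end_time": {"seconds": seconds, "nanos": nanos}}
)
point = monitoring_v3.Point({"interval": interval, "value": {"double_value": 42.0}})
series.points = [point]

# At most 200 TimeSeries objects may be sent per Create request.
client.create_time_series(name=f"projects/{project_id}", time_series=[series])
```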
To see the additional - results, use that value as ``page_token`` in the next call - to this method. - partial_errors (Sequence[google.rpc.status_pb2.Status]): - Query execution errors that may have caused - the time series data returned to be incomplete. - The available data will be available in the - response. - """ - - @property - def raw_page(self): - return self - - time_series_descriptor = proto.Field( - proto.MESSAGE, - number=8, - message=gm_metric.TimeSeriesDescriptor, - ) - time_series_data = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=gm_metric.TimeSeriesData, - ) - next_page_token = proto.Field( - proto.STRING, - number=10, - ) - partial_errors = proto.RepeatedField( - proto.MESSAGE, - number=11, - message=status_pb2.Status, - ) - - -class QueryErrorList(proto.Message): - r"""This is an error detail intended to be used with INVALID_ARGUMENT - errors. - - Attributes: - errors (Sequence[google.cloud.monitoring_v3.types.QueryError]): - Errors in parsing the time series query - language text. The number of errors in the - response may be limited. - error_summary (str): - A summary of all the errors. - """ - - errors = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gm_metric.QueryError, - ) - error_summary = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/mutation_record.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/mutation_record.py deleted file mode 100644 index 2739618c..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/mutation_record.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'MutationRecord', - }, -) - - -class MutationRecord(proto.Message): - r"""Describes a change made to a configuration. - Attributes: - mutate_time (google.protobuf.timestamp_pb2.Timestamp): - When the change occurred. - mutated_by (str): - The email address of the user making the - change. - """ - - mutate_time = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - mutated_by = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification.py deleted file mode 100644 index b4f8fdf1..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification.py +++ /dev/null @@ -1,256 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
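A sketch of ``QueryTimeSeries`` via the generated ``QueryServiceClient``; the MQL query is illustrative, and each item yielded by the pager is assumed to be a ``TimeSeriesData`` as described above:

```python
from google.cloud import monitoring_v3

client = monitoring_v3.QueryServiceClient()
project_id = "my-project"  # placeholder

# Illustrative MQL: CPU utilization per VM over the last 5 minutes.
query = """
fetch gce_instance
| metric 'compute.googleapis.com/instance/cpu/utilization'
| within 5m
"""

results = client.query_time_series(
    request={"name": f"projects/{project_id}", "query": query}
)
for ts_data in results:
    for point in ts_data.point_data:
        print(point.time_interval.end_time, list(point.values))
```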
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.api import label_pb2 # type: ignore -from google.api import launch_stage_pb2 # type: ignore -from google.cloud.monitoring_v3.types import mutation_record -from google.protobuf import wrappers_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'NotificationChannelDescriptor', - 'NotificationChannel', - }, -) - - -class NotificationChannelDescriptor(proto.Message): - r"""A description of a notification channel. The descriptor - includes the properties of the channel and the set of labels or - fields that must be specified to configure channels of a given - type. - - Attributes: - name (str): - The full REST resource name for this descriptor. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannelDescriptors/[TYPE] - - In the above, ``[TYPE]`` is the value of the ``type`` field. - type_ (str): - The type of notification channel, such as "email" and "sms". - To view the full list of channels, see `Channel - descriptors `__. - Notification channel types are globally unique. - display_name (str): - A human-readable name for the notification - channel type. This form of the name is suitable - for a user interface. - description (str): - A human-readable description of the - notification channel type. The description may - include a description of the properties of the - channel and pointers to external documentation. - labels (Sequence[google.api.label_pb2.LabelDescriptor]): - The set of labels that must be defined to - identify a particular channel of the - corresponding type. Each label includes a - description for how that field should be - populated. - launch_stage (google.api.launch_stage_pb2.LaunchStage): - The product launch stage for channels of this - type. - """ - - name = proto.Field( - proto.STRING, - number=6, - ) - type_ = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - description = proto.Field( - proto.STRING, - number=3, - ) - labels = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=label_pb2.LabelDescriptor, - ) - launch_stage = proto.Field( - proto.ENUM, - number=7, - enum=launch_stage_pb2.LaunchStage, - ) - - -class NotificationChannel(proto.Message): - r"""A ``NotificationChannel`` is a medium through which an alert is - delivered when a policy violation is detected. Examples of channels - include email, SMS, and third-party messaging applications. Fields - containing sensitive information like authentication tokens or - contact info are only partially populated on retrieval. - - Attributes: - type_ (str): - The type of the notification channel. This field matches the - value of the - [NotificationChannelDescriptor.type][google.monitoring.v3.NotificationChannelDescriptor.type] - field. - name (str): - The full REST resource name for this channel. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] - - The ``[CHANNEL_ID]`` is automatically assigned by the server - on creation. 
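A minimal sketch of ``ListNotificationChannelDescriptors`` with the generated ``NotificationChannelServiceClient``; the project ID is a placeholder:

```python
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
project_id = "my-project"  # placeholder

descriptors = client.list_notification_channel_descriptors(
    name=f"projects/{project_id}"
)
for descriptor in descriptors:
    # Note the trailing underscore: the field is exposed as ``type_``.
    print(descriptor.type_, "-", descriptor.display_name)
```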
- display_name (str): - An optional human-readable name for this - notification channel. It is recommended that you - specify a non-empty and unique name in order to - make it easier to identify the channels in your - project, though this is not enforced. The - display name is limited to 512 Unicode - characters. - description (str): - An optional human-readable description of - this notification channel. This description may - provide additional details, beyond the display - name, for the channel. This may not exceed 1024 - Unicode characters. - labels (Sequence[google.cloud.monitoring_v3.types.NotificationChannel.LabelsEntry]): - Configuration fields that define the channel and its - behavior. The permissible and required labels are specified - in the - [NotificationChannelDescriptor.labels][google.monitoring.v3.NotificationChannelDescriptor.labels] - of the ``NotificationChannelDescriptor`` corresponding to - the ``type`` field. - user_labels (Sequence[google.cloud.monitoring_v3.types.NotificationChannel.UserLabelsEntry]): - User-supplied key/value data that does not need to conform - to the corresponding ``NotificationChannelDescriptor``'s - schema, unlike the ``labels`` field. This field is intended - to be used for organizing and identifying the - ``NotificationChannel`` objects. - - The field can contain up to 64 entries. Each key and value - is limited to 63 Unicode characters or 128 bytes, whichever - is smaller. Labels and values can contain only lowercase - letters, numerals, underscores, and dashes. Keys must begin - with a letter. - verification_status (google.cloud.monitoring_v3.types.NotificationChannel.VerificationStatus): - Indicates whether this channel has been verified or not. On - a - [``ListNotificationChannels``][google.monitoring.v3.NotificationChannelService.ListNotificationChannels] - or - [``GetNotificationChannel``][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] - operation, this field is expected to be populated. - - If the value is ``UNVERIFIED``, then it indicates that the - channel is non-functioning (it both requires verification - and lacks verification); otherwise, it is assumed that the - channel works. - - If the channel is neither ``VERIFIED`` nor ``UNVERIFIED``, - it implies that the channel is of a type that does not - require verification or that this specific channel has been - exempted from verification because it was created prior to - verification being required for channels of this type. - - This field cannot be modified using a standard - [``UpdateNotificationChannel``][google.monitoring.v3.NotificationChannelService.UpdateNotificationChannel] - operation. To change the value of this field, you must call - [``VerifyNotificationChannel``][google.monitoring.v3.NotificationChannelService.VerifyNotificationChannel]. - enabled (google.protobuf.wrappers_pb2.BoolValue): - Whether notifications are forwarded to the - described channel. This makes it possible to - disable delivery of notifications to a - particular channel without removing the channel - from all alerting policies that reference the - channel. This is a more convenient approach when - the change is temporary and you want to receive - notifications from the same set of alerting - policies on the channel at some point in the - future. - creation_record (google.cloud.monitoring_v3.types.MutationRecord): - Record of the creation of this channel. 
- mutation_records (Sequence[google.cloud.monitoring_v3.types.MutationRecord]): - Records of the modification of this channel. - """ - class VerificationStatus(proto.Enum): - r"""Indicates whether the channel has been verified or not. It is - illegal to specify this field in a - [``CreateNotificationChannel``][google.monitoring.v3.NotificationChannelService.CreateNotificationChannel] - or an - [``UpdateNotificationChannel``][google.monitoring.v3.NotificationChannelService.UpdateNotificationChannel] - operation. - """ - VERIFICATION_STATUS_UNSPECIFIED = 0 - UNVERIFIED = 1 - VERIFIED = 2 - - type_ = proto.Field( - proto.STRING, - number=1, - ) - name = proto.Field( - proto.STRING, - number=6, - ) - display_name = proto.Field( - proto.STRING, - number=3, - ) - description = proto.Field( - proto.STRING, - number=4, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - user_labels = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - verification_status = proto.Field( - proto.ENUM, - number=9, - enum=VerificationStatus, - ) - enabled = proto.Field( - proto.MESSAGE, - number=11, - message=wrappers_pb2.BoolValue, - ) - creation_record = proto.Field( - proto.MESSAGE, - number=12, - message=mutation_record.MutationRecord, - ) - mutation_records = proto.RepeatedField( - proto.MESSAGE, - number=13, - message=mutation_record.MutationRecord, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification_service.py deleted file mode 100644 index 418262f2..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/notification_service.py +++ /dev/null @@ -1,445 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.monitoring_v3.types import notification -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'ListNotificationChannelDescriptorsRequest', - 'ListNotificationChannelDescriptorsResponse', - 'GetNotificationChannelDescriptorRequest', - 'CreateNotificationChannelRequest', - 'ListNotificationChannelsRequest', - 'ListNotificationChannelsResponse', - 'GetNotificationChannelRequest', - 'UpdateNotificationChannelRequest', - 'DeleteNotificationChannelRequest', - 'SendNotificationChannelVerificationCodeRequest', - 'GetNotificationChannelVerificationCodeRequest', - 'GetNotificationChannelVerificationCodeResponse', - 'VerifyNotificationChannelRequest', - }, -) - - -class ListNotificationChannelDescriptorsRequest(proto.Message): - r"""The ``ListNotificationChannelDescriptors`` request. - Attributes: - name (str): - Required. The REST resource name of the parent from which to - retrieve the notification channel descriptors. 
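# Illustrative sketch (not from the generated diff above): building the
# NotificationChannel message whose fields are documented above, assuming the
# generated ``google.cloud.monitoring_v3`` package from this staging tree.
# The display name, email address, and user label are placeholder values; the
# required ``labels`` keys come from the matching NotificationChannelDescriptor
# for the well-known "email" channel type.
from google.cloud import monitoring_v3

email_channel = monitoring_v3.NotificationChannel(
    type_="email",
    display_name="On-call email",
    labels={"email_address": "oncall@example.com"},
    user_labels={"team": "sre"},
    enabled=True,
)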
The expected - syntax is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - Note that this - `names `__ - the parent container in which to look for the descriptors; - to retrieve a single descriptor by name, use the - [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] - operation, instead. - page_size (int): - The maximum number of results to return in a - single response. If not set to a positive - number, a reasonable value will be chosen by the - service. - page_token (str): - If non-empty, ``page_token`` must contain a value returned - as the ``next_page_token`` in a previous response to request - the next set of results. - """ - - name = proto.Field( - proto.STRING, - number=4, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class ListNotificationChannelDescriptorsResponse(proto.Message): - r"""The ``ListNotificationChannelDescriptors`` response. - Attributes: - channel_descriptors (Sequence[google.cloud.monitoring_v3.types.NotificationChannelDescriptor]): - The notification channel descriptors supported - for the specified project, optionally filtered. - next_page_token (str): - If not empty, indicates that there may be more results that - match the request. Use the value in the ``page_token`` field - in a subsequent request to fetch the next set of results. If - empty, all results have been returned. - """ - - @property - def raw_page(self): - return self - - channel_descriptors = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=notification.NotificationChannelDescriptor, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetNotificationChannelDescriptorRequest(proto.Message): - r"""The ``GetNotificationChannelDescriptor`` request. - Attributes: - name (str): - Required. The channel type for which to execute the request. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannelDescriptors/[CHANNEL_TYPE] - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - - -class CreateNotificationChannelRequest(proto.Message): - r"""The ``CreateNotificationChannel`` request. - Attributes: - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This names the container into which the channel will be - written; this does not name the newly created channel. The - resulting channel's name will have a normalized version of - this field as a prefix, but will add - ``/notificationChannels/[CHANNEL_ID]`` to identify the - channel. - notification_channel (google.cloud.monitoring_v3.types.NotificationChannel): - Required. The definition of the ``NotificationChannel`` to - create. - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - notification_channel = proto.Field( - proto.MESSAGE, - number=2, - message=notification.NotificationChannel, - ) - - -class ListNotificationChannelsRequest(proto.Message): - r"""The ``ListNotificationChannels`` request. - Attributes: - name (str): - Required. The - `project `__ - on which to execute the request. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - This names the container in which to look for the - notification channels; it does not name a specific channel. - To query a specific channel by REST resource name, use the - [``GetNotificationChannel``][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] - operation.
- filter (str): - If provided, this field specifies the criteria that must be - met by notification channels to be included in the response. - - For more details, see `sorting and - filtering `__. - order_by (str): - A comma-separated list of fields by which to sort the - result. Supports the same set of fields as in ``filter``. - Entries can be prefixed with a minus sign to sort in - descending rather than ascending order. - - For more details, see `sorting and - filtering `__. - page_size (int): - The maximum number of results to return in a - single response. If not set to a positive - number, a reasonable value will be chosen by the - service. - page_token (str): - If non-empty, ``page_token`` must contain a value returned - as the ``next_page_token`` in a previous response to request - the next set of results. - """ - - name = proto.Field( - proto.STRING, - number=5, - ) - filter = proto.Field( - proto.STRING, - number=6, - ) - order_by = proto.Field( - proto.STRING, - number=7, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListNotificationChannelsResponse(proto.Message): - r"""The ``ListNotificationChannels`` response. - Attributes: - notification_channels (Sequence[google.cloud.monitoring_v3.types.NotificationChannel]): - The notification channels defined for the - specified project. - next_page_token (str): - If not empty, indicates that there may be more results that - match the request. Use the value in the ``page_token`` field - in a subsequent request to fetch the next set of results. If - empty, all results have been returned. - total_size (int): - The total number of notification channels in - all pages. This number is only an estimate, and - may change in subsequent pages. - https://aip.dev/158 - """ - - @property - def raw_page(self): - return self - - notification_channels = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=notification.NotificationChannel, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - total_size = proto.Field( - proto.INT32, - number=4, - ) - - -class GetNotificationChannelRequest(proto.Message): - r"""The ``GetNotificationChannel`` request. - Attributes: - name (str): - Required. The channel for which to execute the request. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - - -class UpdateNotificationChannelRequest(proto.Message): - r"""The ``UpdateNotificationChannel`` request. - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update. - notification_channel (google.cloud.monitoring_v3.types.NotificationChannel): - Required. A description of the changes to be applied to the - specified notification channel. The description must provide - a definition for fields to be updated; the names of these - fields should also be included in the ``update_mask``. - """ - - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - notification_channel = proto.Field( - proto.MESSAGE, - number=3, - message=notification.NotificationChannel, - ) - - -class DeleteNotificationChannelRequest(proto.Message): - r"""The ``DeleteNotificationChannel`` request. - Attributes: - name (str): - Required. The channel for which to execute the request. 
The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] - force (bool): - If true, the notification channel will be - deleted regardless of its use in alert policies - (the policies will be updated to remove the - channel). If false, channels that are still - referenced by an existing alerting policy will - fail to be deleted in a delete operation. - """ - - name = proto.Field( - proto.STRING, - number=3, - ) - force = proto.Field( - proto.BOOL, - number=5, - ) - - -class SendNotificationChannelVerificationCodeRequest(proto.Message): - r"""The ``SendNotificationChannelVerificationCode`` request. - Attributes: - name (str): - Required. The notification channel to which - to send a verification code. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class GetNotificationChannelVerificationCodeRequest(proto.Message): - r"""The ``GetNotificationChannelVerificationCode`` request. - Attributes: - name (str): - Required. The notification channel for which - a verification code is to be generated and - retrieved. This must name a channel that is - already verified; if the specified channel is - not verified, the request will fail. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - The desired expiration time. If specified, - the API will guarantee that the returned code - will not be valid after the specified timestamp; - however, the API cannot guarantee that the - returned code will be valid for at least as long - as the requested time (the API puts an upper - bound on the amount of time for which a code may - be valid). If omitted, a default expiration will - be used, which may be less than the max - permissible expiration (so specifying an - expiration may extend the code's lifetime over - omitting an expiration, even though the API does - impose an upper limit on the maximum expiration - that is permitted). - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - expire_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class GetNotificationChannelVerificationCodeResponse(proto.Message): - r"""The ``GetNotificationChannelVerificationCode`` response. - Attributes: - code (str): - The verification code, which may be used to - verify other channels that have an equivalent - identity (i.e. other channels of the same type - with the same fingerprint such as other email - channels with the same email address or other - sms channels with the same number). - expire_time (google.protobuf.timestamp_pb2.Timestamp): - The expiration time associated with the code - that was returned. If an expiration was provided - in the request, this is the minimum of the - requested expiration in the request and the max - permitted expiration. - """ - - code = proto.Field( - proto.STRING, - number=1, - ) - expire_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class VerifyNotificationChannelRequest(proto.Message): - r"""The ``VerifyNotificationChannel`` request. - Attributes: - name (str): - Required. The notification channel to verify. - code (str): - Required. The verification code that was delivered to the - channel as a result of invoking the - ``SendNotificationChannelVerificationCode`` API method or - that was retrieved from a verified channel via - ``GetNotificationChannelVerificationCode``.
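# Illustrative sketch (not from the generated diff above): the verification
# flow described by the request messages documented here, assuming the
# generated NotificationChannelServiceClient from this package. The project ID
# and channel name are placeholders, and the code value is whatever was
# delivered to the channel (for example by email or SMS).
from google.cloud import monitoring_v3

client = monitoring_v3.NotificationChannelServiceClient()
channel_name = "projects/my-project/notificationChannels/1234567890"

# Ask the service to deliver a verification code to the channel itself.
client.send_notification_channel_verification_code(name=channel_name)

# Later, prove ownership of the channel by echoing the delivered code back.
verified = client.verify_notification_channel(name=channel_name, code="123456")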
For example, one - might have "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" (in - general, one is only guaranteed that the code is valid - UTF-8; one should not make any assumptions regarding the - structure or format of the code). - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - code = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/query_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/query_service.py deleted file mode 100644 index 0ea6c2d0..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/query_service.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - }, -) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service.py deleted file mode 100644 index 3a590100..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service.py +++ /dev/null @@ -1,775 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.type import calendar_period_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'Service', - 'ServiceLevelObjective', - 'ServiceLevelIndicator', - 'BasicSli', - 'Range', - 'RequestBasedSli', - 'TimeSeriesRatio', - 'DistributionCut', - 'WindowsBasedSli', - }, -) - - -class Service(proto.Message): - r"""A ``Service`` is a discrete, autonomous, and network-accessible - unit, designed to solve an individual concern - (`Wikipedia `__). - In Cloud Monitoring, a ``Service`` acts as the root resource under - which operational aspects of the service are accessible. - - Attributes: - name (str): - Resource name for this Service. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - display_name (str): - Name used for UI elements listing this - Service. - custom (google.cloud.monitoring_v3.types.Service.Custom): - Custom service type. - app_engine (google.cloud.monitoring_v3.types.Service.AppEngine): - Type used for App Engine services. 
- cloud_endpoints (google.cloud.monitoring_v3.types.Service.CloudEndpoints): - Type used for Cloud Endpoints services. - cluster_istio (google.cloud.monitoring_v3.types.Service.ClusterIstio): - Type used for Istio services that live in a - Kubernetes cluster. - mesh_istio (google.cloud.monitoring_v3.types.Service.MeshIstio): - Type used for Istio services scoped to an - Istio mesh. - istio_canonical_service (google.cloud.monitoring_v3.types.Service.IstioCanonicalService): - Type used for canonical services scoped to an Istio mesh. - Metrics for Istio are `documented - here `__ - telemetry (google.cloud.monitoring_v3.types.Service.Telemetry): - Configuration for how to query telemetry on a - Service. - user_labels (Sequence[google.cloud.monitoring_v3.types.Service.UserLabelsEntry]): - Labels which have been used to annotate the - service. Label keys must start with a letter. - Label keys and values may contain lowercase - letters, numbers, underscores, and dashes. Label - keys and values have a maximum length of 63 - characters, and must be less than 128 bytes in - size. Up to 64 label entries may be stored. For - labels which do not have a semantic value, the - empty string may be supplied for the label - value. - """ - - class Custom(proto.Message): - r"""Custom view of service telemetry. Currently a place-holder - pending final design. - """ - - class AppEngine(proto.Message): - r"""App Engine service. Learn more at - https://cloud.google.com/appengine. - - Attributes: - module_id (str): - The ID of the App Engine module underlying this service. - Corresponds to the ``module_id`` resource label in the - ``gae_app`` monitored resource: - https://cloud.google.com/monitoring/api/resources#tag_gae_app - """ - - module_id = proto.Field( - proto.STRING, - number=1, - ) - - class CloudEndpoints(proto.Message): - r"""Cloud Endpoints service. Learn more at - https://cloud.google.com/endpoints. - - Attributes: - service (str): - The name of the Cloud Endpoints service underlying this - service. Corresponds to the ``service`` resource label in - the ``api`` monitored resource: - https://cloud.google.com/monitoring/api/resources#tag_api - """ - - service = proto.Field( - proto.STRING, - number=1, - ) - - class ClusterIstio(proto.Message): - r"""Istio service scoped to a single Kubernetes cluster. Learn - more at https://istio.io. Clusters running OSS Istio will have - their services ingested as this type. - - Attributes: - location (str): - The location of the Kubernetes cluster in which this Istio - service is defined. Corresponds to the ``location`` resource - label in ``k8s_cluster`` resources. - cluster_name (str): - The name of the Kubernetes cluster in which this Istio - service is defined. Corresponds to the ``cluster_name`` - resource label in ``k8s_cluster`` resources. - service_namespace (str): - The namespace of the Istio service underlying this service. - Corresponds to the ``destination_service_namespace`` metric - label in Istio metrics. - service_name (str): - The name of the Istio service underlying this service. - Corresponds to the ``destination_service_name`` metric label - in Istio metrics. - """ - - location = proto.Field( - proto.STRING, - number=1, - ) - cluster_name = proto.Field( - proto.STRING, - number=2, - ) - service_namespace = proto.Field( - proto.STRING, - number=3, - ) - service_name = proto.Field( - proto.STRING, - number=4, - ) - - class MeshIstio(proto.Message): - r"""Istio service scoped to an Istio mesh. 
Anthos clusters - running ASM < 1.6.8 will have their services ingested as this - type. - - Attributes: - mesh_uid (str): - Identifier for the mesh in which this Istio service is - defined. Corresponds to the ``mesh_uid`` metric label in - Istio metrics. - service_namespace (str): - The namespace of the Istio service underlying this service. - Corresponds to the ``destination_service_namespace`` metric - label in Istio metrics. - service_name (str): - The name of the Istio service underlying this service. - Corresponds to the ``destination_service_name`` metric label - in Istio metrics. - """ - - mesh_uid = proto.Field( - proto.STRING, - number=1, - ) - service_namespace = proto.Field( - proto.STRING, - number=3, - ) - service_name = proto.Field( - proto.STRING, - number=4, - ) - - class IstioCanonicalService(proto.Message): - r"""Canonical service scoped to an Istio mesh. Anthos clusters - running ASM >= 1.6.8 will have their services ingested as this - type. - - Attributes: - mesh_uid (str): - Identifier for the Istio mesh in which this canonical - service is defined. Corresponds to the ``mesh_uid`` metric - label in `Istio - metrics `__. - canonical_service_namespace (str): - The namespace of the canonical service underlying this - service. Corresponds to the - ``destination_canonical_service_namespace`` metric label in - `Istio - metrics `__. - canonical_service (str): - The name of the canonical service underlying this service. - Corresponds to the ``destination_canonical_service_name`` - metric label in label in `Istio - metrics `__. - """ - - mesh_uid = proto.Field( - proto.STRING, - number=1, - ) - canonical_service_namespace = proto.Field( - proto.STRING, - number=3, - ) - canonical_service = proto.Field( - proto.STRING, - number=4, - ) - - class Telemetry(proto.Message): - r"""Configuration for how to query telemetry on a Service. - Attributes: - resource_name (str): - The full name of the resource that defines this service. - Formatted as described in - https://cloud.google.com/apis/design/resource_names. - """ - - resource_name = proto.Field( - proto.STRING, - number=1, - ) - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - custom = proto.Field( - proto.MESSAGE, - number=6, - oneof='identifier', - message=Custom, - ) - app_engine = proto.Field( - proto.MESSAGE, - number=7, - oneof='identifier', - message=AppEngine, - ) - cloud_endpoints = proto.Field( - proto.MESSAGE, - number=8, - oneof='identifier', - message=CloudEndpoints, - ) - cluster_istio = proto.Field( - proto.MESSAGE, - number=9, - oneof='identifier', - message=ClusterIstio, - ) - mesh_istio = proto.Field( - proto.MESSAGE, - number=10, - oneof='identifier', - message=MeshIstio, - ) - istio_canonical_service = proto.Field( - proto.MESSAGE, - number=11, - oneof='identifier', - message=IstioCanonicalService, - ) - telemetry = proto.Field( - proto.MESSAGE, - number=13, - message=Telemetry, - ) - user_labels = proto.MapField( - proto.STRING, - proto.STRING, - number=14, - ) - - -class ServiceLevelObjective(proto.Message): - r"""A Service-Level Objective (SLO) describes a level of desired - good service. It consists of a service-level indicator (SLI), a - performance goal, and a period over which the objective is to be - evaluated against that goal. The SLO can use SLIs defined in a - number of different manners. 
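# Illustrative sketch (not from the generated diff above): defining a custom
# Service using the message fields laid out above, assuming the generated
# ServiceMonitoringServiceClient from this package. The project ID, display
# name, and user label are placeholder values.
from google.cloud import monitoring_v3

service = monitoring_v3.Service(
    display_name="Checkout service",
    custom=monitoring_v3.Service.Custom(),
    user_labels={"env": "prod"},
)
client = monitoring_v3.ServiceMonitoringServiceClient()
created = client.create_service(parent="projects/my-project", service=service)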
Typical SLOs might include "99% of - requests in each rolling week have latency below 200 - milliseconds" or "99.5% of requests in each calendar month - return successfully." - - Attributes: - name (str): - Resource name for this ``ServiceLevelObjective``. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] - display_name (str): - Name used for UI elements listing this SLO. - service_level_indicator (google.cloud.monitoring_v3.types.ServiceLevelIndicator): - The definition of good service, used to measure and - calculate the quality of the ``Service``'s performance with - respect to a single aspect of service quality. - goal (float): - The fraction of service that must be good in order for this - objective to be met. ``0 < goal <= 0.999``. - rolling_period (google.protobuf.duration_pb2.Duration): - A rolling time period, semantically "in the past - ````". Must be an integer multiple of 1 day - no larger than 30 days. - calendar_period (google.type.calendar_period_pb2.CalendarPeriod): - A calendar period, semantically "since the start of the - current ````". At this time, only ``DAY``, - ``WEEK``, ``FORTNIGHT``, and ``MONTH`` are supported. - user_labels (Sequence[google.cloud.monitoring_v3.types.ServiceLevelObjective.UserLabelsEntry]): - Labels which have been used to annotate the - service-level objective. Label keys must start - with a letter. Label keys and values may contain - lowercase letters, numbers, underscores, and - dashes. Label keys and values have a maximum - length of 63 characters, and must be less than - 128 bytes in size. Up to 64 label entries may be - stored. For labels which do not have a semantic - value, the empty string may be supplied for the - label value. - """ - class View(proto.Enum): - r"""``ServiceLevelObjective.View`` determines what form of - ``ServiceLevelObjective`` is returned from - ``GetServiceLevelObjective``, ``ListServiceLevelObjectives``, and - ``ListServiceLevelObjectiveVersions`` RPCs. - """ - VIEW_UNSPECIFIED = 0 - FULL = 2 - EXPLICIT = 1 - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=11, - ) - service_level_indicator = proto.Field( - proto.MESSAGE, - number=3, - message='ServiceLevelIndicator', - ) - goal = proto.Field( - proto.DOUBLE, - number=4, - ) - rolling_period = proto.Field( - proto.MESSAGE, - number=5, - oneof='period', - message=duration_pb2.Duration, - ) - calendar_period = proto.Field( - proto.ENUM, - number=6, - oneof='period', - enum=calendar_period_pb2.CalendarPeriod, - ) - user_labels = proto.MapField( - proto.STRING, - proto.STRING, - number=12, - ) - - -class ServiceLevelIndicator(proto.Message): - r"""A Service-Level Indicator (SLI) describes the "performance" of a - service. For some services, the SLI is well-defined. In such cases, - the SLI can be described easily by referencing the well-known SLI - and providing the needed parameters. Alternatively, a "custom" SLI - can be defined with a query to the underlying metric store. An SLI - is defined to be ``good_service / total_service`` over any queried - time interval. The value of performance always falls into the range - ``0 <= performance <= 1``. A custom SLI describes how to compute - this ratio, whether this is by dividing values from a pair of time - series, cutting a ``Distribution`` into good and bad counts, or - counting time windows in which the service complies with a - criterion. 
For separation of concerns, a single Service-Level - Indicator measures performance for only one aspect of service - quality, such as fraction of successful queries or fast-enough - queries. - - Attributes: - basic_sli (google.cloud.monitoring_v3.types.BasicSli): - Basic SLI on a well-known service type. - request_based (google.cloud.monitoring_v3.types.RequestBasedSli): - Request-based SLIs - windows_based (google.cloud.monitoring_v3.types.WindowsBasedSli): - Windows-based SLIs - """ - - basic_sli = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message='BasicSli', - ) - request_based = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='RequestBasedSli', - ) - windows_based = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='WindowsBasedSli', - ) - - -class BasicSli(proto.Message): - r"""An SLI measuring performance on a well-known service type. - Performance will be computed on the basis of pre-defined metrics. - The type of the ``service_resource`` determines the metrics to use - and the ``service_resource.labels`` and ``metric_labels`` are used - to construct a monitoring filter to filter that metric down to just - the data relevant to this service. - - Attributes: - method (Sequence[str]): - OPTIONAL: The set of RPCs to which this SLI - is relevant. Telemetry from other methods will - not be used to calculate performance for this - SLI. If omitted, this SLI applies to all the - Service's methods. For service types that don't - support breaking down by method, setting this - field will result in an error. - location (Sequence[str]): - OPTIONAL: The set of locations to which this - SLI is relevant. Telemetry from other locations - will not be used to calculate performance for - this SLI. If omitted, this SLI applies to all - locations in which the Service has activity. For - service types that don't support breaking down - by location, setting this field will result in - an error. - version (Sequence[str]): - OPTIONAL: The set of API versions to which - this SLI is relevant. Telemetry from other API - versions will not be used to calculate - performance for this SLI. If omitted, this SLI - applies to all API versions. For service types - that don't support breaking down by version, - setting this field will result in an error. - availability (google.cloud.monitoring_v3.types.BasicSli.AvailabilityCriteria): - Good service is defined to be the count of - requests made to this service that return - successfully. - latency (google.cloud.monitoring_v3.types.BasicSli.LatencyCriteria): - Good service is defined to be the count of requests made to - this service that are fast enough with respect to - ``latency.threshold``. - """ - - class AvailabilityCriteria(proto.Message): - r"""Future parameters for the availability SLI. """ - - class LatencyCriteria(proto.Message): - r"""Parameters for a latency threshold SLI. - Attributes: - threshold (google.protobuf.duration_pb2.Duration): - Good service is defined to be the count of requests made to - this service that return in no more than ``threshold``. 
- """ - - threshold = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - - method = proto.RepeatedField( - proto.STRING, - number=7, - ) - location = proto.RepeatedField( - proto.STRING, - number=8, - ) - version = proto.RepeatedField( - proto.STRING, - number=9, - ) - availability = proto.Field( - proto.MESSAGE, - number=2, - oneof='sli_criteria', - message=AvailabilityCriteria, - ) - latency = proto.Field( - proto.MESSAGE, - number=3, - oneof='sli_criteria', - message=LatencyCriteria, - ) - - -class Range(proto.Message): - r"""Range of numerical values within ``min`` and ``max``. - Attributes: - min_ (float): - Range minimum. - max_ (float): - Range maximum. - """ - - min_ = proto.Field( - proto.DOUBLE, - number=1, - ) - max_ = proto.Field( - proto.DOUBLE, - number=2, - ) - - -class RequestBasedSli(proto.Message): - r"""Service Level Indicators for which atomic units of service - are counted directly. - - Attributes: - good_total_ratio (google.cloud.monitoring_v3.types.TimeSeriesRatio): - ``good_total_ratio`` is used when the ratio of - ``good_service`` to ``total_service`` is computed from two - ``TimeSeries``. - distribution_cut (google.cloud.monitoring_v3.types.DistributionCut): - ``distribution_cut`` is used when ``good_service`` is a - count of values aggregated in a ``Distribution`` that fall - into a good range. The ``total_service`` is the total count - of all values aggregated in the ``Distribution``. - """ - - good_total_ratio = proto.Field( - proto.MESSAGE, - number=1, - oneof='method', - message='TimeSeriesRatio', - ) - distribution_cut = proto.Field( - proto.MESSAGE, - number=3, - oneof='method', - message='DistributionCut', - ) - - -class TimeSeriesRatio(proto.Message): - r"""A ``TimeSeriesRatio`` specifies two ``TimeSeries`` to use for - computing the ``good_service / total_service`` ratio. The specified - ``TimeSeries`` must have ``ValueType = DOUBLE`` or - ``ValueType = INT64`` and must have ``MetricKind = DELTA`` or - ``MetricKind = CUMULATIVE``. The ``TimeSeriesRatio`` must specify - exactly two of good, bad, and total, and the relationship - ``good_service + bad_service = total_service`` will be assumed. - - Attributes: - good_service_filter (str): - A `monitoring - filter `__ - specifying a ``TimeSeries`` quantifying good service - provided. Must have ``ValueType = DOUBLE`` or - ``ValueType = INT64`` and must have ``MetricKind = DELTA`` - or ``MetricKind = CUMULATIVE``. - bad_service_filter (str): - A `monitoring - filter `__ - specifying a ``TimeSeries`` quantifying bad service, either - demanded service that was not provided or demanded service - that was of inadequate quality. Must have - ``ValueType = DOUBLE`` or ``ValueType = INT64`` and must - have ``MetricKind = DELTA`` or ``MetricKind = CUMULATIVE``. - total_service_filter (str): - A `monitoring - filter `__ - specifying a ``TimeSeries`` quantifying total demanded - service. Must have ``ValueType = DOUBLE`` or - ``ValueType = INT64`` and must have ``MetricKind = DELTA`` - or ``MetricKind = CUMULATIVE``. - """ - - good_service_filter = proto.Field( - proto.STRING, - number=4, - ) - bad_service_filter = proto.Field( - proto.STRING, - number=5, - ) - total_service_filter = proto.Field( - proto.STRING, - number=6, - ) - - -class DistributionCut(proto.Message): - r"""A ``DistributionCut`` defines a ``TimeSeries`` and thresholds used - for measuring good service and total service. 
The ``TimeSeries`` - must have ``ValueType = DISTRIBUTION`` and ``MetricKind = DELTA`` or - ``MetricKind = CUMULATIVE``. The computed ``good_service`` will be - the estimated count of values in the ``Distribution`` that fall - within the specified ``min`` and ``max``. - - Attributes: - distribution_filter (str): - A `monitoring - filter `__ - specifying a ``TimeSeries`` aggregating values. Must have - ``ValueType = DISTRIBUTION`` and ``MetricKind = DELTA`` or - ``MetricKind = CUMULATIVE``. - range_ (google.cloud.monitoring_v3.types.Range): - Range of values considered "good." For a one- - sided range, set one bound to an infinite value. - """ - - distribution_filter = proto.Field( - proto.STRING, - number=4, - ) - range_ = proto.Field( - proto.MESSAGE, - number=5, - message='Range', - ) - - -class WindowsBasedSli(proto.Message): - r"""A ``WindowsBasedSli`` defines ``good_service`` as the count of time - windows for which the provided service was of good quality. Criteria - for determining if service was good are embedded in the - ``window_criterion``. - - Attributes: - good_bad_metric_filter (str): - A `monitoring - filter `__ - specifying a ``TimeSeries`` with ``ValueType = BOOL``. The - window is good if any ``true`` values appear in the window. - good_total_ratio_threshold (google.cloud.monitoring_v3.types.WindowsBasedSli.PerformanceThreshold): - A window is good if its ``performance`` is high enough. - metric_mean_in_range (google.cloud.monitoring_v3.types.WindowsBasedSli.MetricRange): - A window is good if the metric's value is in - a good range, averaged across returned streams. - metric_sum_in_range (google.cloud.monitoring_v3.types.WindowsBasedSli.MetricRange): - A window is good if the metric's value is in - a good range, summed across returned streams. - window_period (google.protobuf.duration_pb2.Duration): - Duration over which window quality is evaluated. Must be an - integer fraction of a day and at least ``60s``. - """ - - class PerformanceThreshold(proto.Message): - r"""A ``PerformanceThreshold`` is used when each window is counted as - good if that window has a sufficiently high ``performance``. - - Attributes: - performance (google.cloud.monitoring_v3.types.RequestBasedSli): - ``RequestBasedSli`` to evaluate to judge window quality. - basic_sli_performance (google.cloud.monitoring_v3.types.BasicSli): - ``BasicSli`` to evaluate to judge window quality. - threshold (float): - If window ``performance >= threshold``, the window is - counted as good. - """ - - performance = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='RequestBasedSli', - ) - basic_sli_performance = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='BasicSli', - ) - threshold = proto.Field( - proto.DOUBLE, - number=2, - ) - - class MetricRange(proto.Message): - r"""A ``MetricRange`` is used when each window is counted as good if the value x - of a single ``TimeSeries`` satisfies - ``range.min <= x <= range.max``. The provided ``TimeSeries`` must - have ``ValueType = INT64`` or ``ValueType = DOUBLE`` and - ``MetricKind = GAUGE``. - - Attributes: - time_series (str): - A `monitoring - filter `__ - specifying the ``TimeSeries`` to use for evaluating window - quality. - range_ (google.cloud.monitoring_v3.types.Range): - Range of values considered "good." For a one- - sided range, set one bound to an infinite value.
- """ - - time_series = proto.Field( - proto.STRING, - number=1, - ) - range_ = proto.Field( - proto.MESSAGE, - number=4, - message='Range', - ) - - good_bad_metric_filter = proto.Field( - proto.STRING, - number=5, - oneof='window_criterion', - ) - good_total_ratio_threshold = proto.Field( - proto.MESSAGE, - number=2, - oneof='window_criterion', - message=PerformanceThreshold, - ) - metric_mean_in_range = proto.Field( - proto.MESSAGE, - number=6, - oneof='window_criterion', - message=MetricRange, - ) - metric_sum_in_range = proto.Field( - proto.MESSAGE, - number=7, - oneof='window_criterion', - message=MetricRange, - ) - window_period = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service_service.py deleted file mode 100644 index b7f0c7a1..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/service_service.py +++ /dev/null @@ -1,416 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.monitoring_v3.types import service as gm_service -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'CreateServiceRequest', - 'GetServiceRequest', - 'ListServicesRequest', - 'ListServicesResponse', - 'UpdateServiceRequest', - 'DeleteServiceRequest', - 'CreateServiceLevelObjectiveRequest', - 'GetServiceLevelObjectiveRequest', - 'ListServiceLevelObjectivesRequest', - 'ListServiceLevelObjectivesResponse', - 'UpdateServiceLevelObjectiveRequest', - 'DeleteServiceLevelObjectiveRequest', - }, -) - - -class CreateServiceRequest(proto.Message): - r"""The ``CreateService`` request. - Attributes: - parent (str): - Required. Resource - `name `__ - of the parent workspace. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - service_id (str): - Optional. The Service id to use for this Service. If - omitted, an id will be generated instead. Must match the - pattern ``[a-z0-9\-]+`` - service (google.cloud.monitoring_v3.types.Service): - Required. The ``Service`` to create. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - service_id = proto.Field( - proto.STRING, - number=3, - ) - service = proto.Field( - proto.MESSAGE, - number=2, - message=gm_service.Service, - ) - - -class GetServiceRequest(proto.Message): - r"""The ``GetService`` request. - Attributes: - name (str): - Required. Resource name of the ``Service``. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListServicesRequest(proto.Message): - r"""The ``ListServices`` request. - Attributes: - parent (str): - Required. 
Resource name of the parent containing the listed - services, either a - `project `__ - or a Monitoring Workspace. The formats are: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - workspaces/[HOST_PROJECT_ID_OR_NUMBER] - filter (str): - A filter specifying what ``Service``\ s to return. The - filter currently supports the following fields: - - :: - - - `identifier_case` - - `app_engine.module_id` - - `cloud_endpoints.service` (reserved for future use) - - `mesh_istio.mesh_uid` - - `mesh_istio.service_namespace` - - `mesh_istio.service_name` - - `cluster_istio.location` (deprecated) - - `cluster_istio.cluster_name` (deprecated) - - `cluster_istio.service_namespace` (deprecated) - - `cluster_istio.service_name` (deprecated) - - ``identifier_case`` refers to which option in the identifier - oneof is populated. For example, the filter - ``identifier_case = "CUSTOM"`` would match all services with - a value for the ``custom`` field. Valid options are - "CUSTOM", "APP_ENGINE", "MESH_ISTIO", plus "CLUSTER_ISTIO" - (deprecated) and "CLOUD_ENDPOINTS" (reserved for future - use). - page_size (int): - A non-negative number that is the maximum - number of results to return. When 0, use default - page size. - page_token (str): - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListServicesResponse(proto.Message): - r"""The ``ListServices`` response. - Attributes: - services (Sequence[google.cloud.monitoring_v3.types.Service]): - The ``Service``\ s matching the specified filter. - next_page_token (str): - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``page_token`` in the next call - to this method. - """ - - @property - def raw_page(self): - return self - - services = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gm_service.Service, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateServiceRequest(proto.Message): - r"""The ``UpdateService`` request. - Attributes: - service (google.cloud.monitoring_v3.types.Service): - Required. The ``Service`` to draw updates from. The given - ``name`` specifies the resource to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A set of field paths defining which fields to - use for the update. - """ - - service = proto.Field( - proto.MESSAGE, - number=1, - message=gm_service.Service, - ) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteServiceRequest(proto.Message): - r"""The ``DeleteService`` request. - Attributes: - name (str): - Required. Resource name of the ``Service`` to delete. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateServiceLevelObjectiveRequest(proto.Message): - r"""The ``CreateServiceLevelObjective`` request. - Attributes: - parent (str): - Required. Resource name of the parent ``Service``. 
The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - service_level_objective_id (str): - Optional. The ServiceLevelObjective id to use for this - ServiceLevelObjective. If omitted, an id will be generated - instead. Must match the pattern ``[a-z0-9\-]+`` - service_level_objective (google.cloud.monitoring_v3.types.ServiceLevelObjective): - Required. The ``ServiceLevelObjective`` to create. The - provided ``name`` will be respected if no - ``ServiceLevelObjective`` exists with this name. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - service_level_objective_id = proto.Field( - proto.STRING, - number=3, - ) - service_level_objective = proto.Field( - proto.MESSAGE, - number=2, - message=gm_service.ServiceLevelObjective, - ) - - -class GetServiceLevelObjectiveRequest(proto.Message): - r"""The ``GetServiceLevelObjective`` request. - Attributes: - name (str): - Required. Resource name of the ``ServiceLevelObjective`` to - get. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] - view (google.cloud.monitoring_v3.types.ServiceLevelObjective.View): - View of the ``ServiceLevelObjective`` to return. If - ``DEFAULT``, return the ``ServiceLevelObjective`` as - originally defined. If ``EXPLICIT`` and the - ``ServiceLevelObjective`` is defined in terms of a - ``BasicSli``, replace the ``BasicSli`` with a - ``RequestBasedSli`` spelling out how the SLI is computed. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - view = proto.Field( - proto.ENUM, - number=2, - enum=gm_service.ServiceLevelObjective.View, - ) - - -class ListServiceLevelObjectivesRequest(proto.Message): - r"""The ``ListServiceLevelObjectives`` request. - Attributes: - parent (str): - Required. Resource name of the parent containing the listed - SLOs, either a project or a Monitoring Workspace. The - formats are: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] - workspaces/[HOST_PROJECT_ID_OR_NUMBER]/services/- - filter (str): - A filter specifying what ``ServiceLevelObjective``\ s to - return. - page_size (int): - A non-negative number that is the maximum - number of results to return. When 0, use default - page size. - page_token (str): - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - view (google.cloud.monitoring_v3.types.ServiceLevelObjective.View): - View of the ``ServiceLevelObjective``\ s to return. If - ``DEFAULT``, return each ``ServiceLevelObjective`` as - originally defined. If ``EXPLICIT`` and the - ``ServiceLevelObjective`` is defined in terms of a - ``BasicSli``, replace the ``BasicSli`` with a - ``RequestBasedSli`` spelling out how the SLI is computed. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - view = proto.Field( - proto.ENUM, - number=5, - enum=gm_service.ServiceLevelObjective.View, - ) - - -class ListServiceLevelObjectivesResponse(proto.Message): - r"""The ``ListServiceLevelObjectives`` response. - Attributes: - service_level_objectives (Sequence[google.cloud.monitoring_v3.types.ServiceLevelObjective]): - The ``ServiceLevelObjective``\ s matching the specified - filter. 
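# Illustrative sketch (not from the generated diff above): issuing the
# CreateServiceLevelObjective and ListServiceLevelObjectives requests described
# here, assuming the generated ServiceMonitoringServiceClient from this
# package. Resource names, the goal, and the 30-day rolling window are
# placeholder values.
from google.cloud import monitoring_v3

client = monitoring_v3.ServiceMonitoringServiceClient()
parent = "projects/my-project/services/my-service"

slo = monitoring_v3.ServiceLevelObjective(
    display_name="99.9% availability over 30 days",
    goal=0.999,
    rolling_period={"seconds": 30 * 24 * 3600},
    service_level_indicator=monitoring_v3.ServiceLevelIndicator(
        basic_sli=monitoring_v3.BasicSli(
            availability=monitoring_v3.BasicSli.AvailabilityCriteria()
        )
    ),
)

created = client.create_service_level_objective(
    parent=parent,
    service_level_objective=slo,
)
for objective in client.list_service_level_objectives(parent=parent):
    print(objective.name, objective.goal)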
- next_page_token (str): - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``page_token`` in the next call - to this method. - """ - - @property - def raw_page(self): - return self - - service_level_objectives = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gm_service.ServiceLevelObjective, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateServiceLevelObjectiveRequest(proto.Message): - r"""The ``UpdateServiceLevelObjective`` request. - Attributes: - service_level_objective (google.cloud.monitoring_v3.types.ServiceLevelObjective): - Required. The ``ServiceLevelObjective`` to draw updates - from. The given ``name`` specifies the resource to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A set of field paths defining which fields to - use for the update. - """ - - service_level_objective = proto.Field( - proto.MESSAGE, - number=1, - message=gm_service.ServiceLevelObjective, - ) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteServiceLevelObjectiveRequest(proto.Message): - r"""The ``DeleteServiceLevelObjective`` request. - Attributes: - name (str): - Required. Resource name of the ``ServiceLevelObjective`` to - delete. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/span_context.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/span_context.py deleted file mode 100644 index f7c37ffa..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/span_context.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'SpanContext', - }, -) - - -class SpanContext(proto.Message): - r"""The context of a span. This is attached to an - [Exemplar][google.api.Distribution.Exemplar] in - [Distribution][google.api.Distribution] values during aggregation. - - It contains the name of a span with format: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/traces/[TRACE_ID]/spans/[SPAN_ID] - - Attributes: - span_name (str): - The resource name of the span. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/traces/[TRACE_ID]/spans/[SPAN_ID] - - ``[TRACE_ID]`` is a unique identifier for a trace within a - project; it is a 32-character hexadecimal encoding of a - 16-byte array. - - ``[SPAN_ID]`` is a unique identifier for a span within a - trace; it is a 16-character hexadecimal encoding of an - 8-byte array. 
- """ - - span_name = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime.py deleted file mode 100644 index c6310c72..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime.py +++ /dev/null @@ -1,538 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'UptimeCheckRegion', - 'GroupResourceType', - 'InternalChecker', - 'UptimeCheckConfig', - 'UptimeCheckIp', - }, -) - - -class UptimeCheckRegion(proto.Enum): - r"""The regions from which an Uptime check can be run.""" - REGION_UNSPECIFIED = 0 - USA = 1 - EUROPE = 2 - SOUTH_AMERICA = 3 - ASIA_PACIFIC = 4 - - -class GroupResourceType(proto.Enum): - r"""The supported resource types that can be used as values of - ``group_resource.resource_type``. ``INSTANCE`` includes - ``gce_instance`` and ``aws_ec2_instance`` resource types. The - resource types ``gae_app`` and ``uptime_url`` are not valid here - because group checks on App Engine modules and URLs are not allowed. - """ - RESOURCE_TYPE_UNSPECIFIED = 0 - INSTANCE = 1 - AWS_ELB_LOAD_BALANCER = 2 - - -class InternalChecker(proto.Message): - r"""An internal checker allows Uptime checks to run on - private/internal GCP resources. - - Attributes: - name (str): - A unique resource name for this InternalChecker. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/internalCheckers/[INTERNAL_CHECKER_ID] - - ``[PROJECT_ID_OR_NUMBER]`` is the Stackdriver Workspace - project for the Uptime check config associated with the - internal checker. - display_name (str): - The checker's human-readable name. The - display name should be unique within a - Stackdriver Workspace in order to make it easier - to identify; however, uniqueness is not - enforced. - network (str): - The `GCP VPC - network `__ where the - internal resource lives (ex: "default"). - gcp_zone (str): - The GCP zone the Uptime check should egress from. Only - respected for internal Uptime checks, where internal_network - is specified. - peer_project_id (str): - The GCP project ID where the internal checker - lives. Not necessary the same as the Workspace - project. - state (google.cloud.monitoring_v3.types.InternalChecker.State): - The current operational state of the internal - checker. 
- """ - class State(proto.Enum): - r"""Operational states for an internal checker.""" - UNSPECIFIED = 0 - CREATING = 1 - RUNNING = 2 - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - network = proto.Field( - proto.STRING, - number=3, - ) - gcp_zone = proto.Field( - proto.STRING, - number=4, - ) - peer_project_id = proto.Field( - proto.STRING, - number=6, - ) - state = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - - -class UptimeCheckConfig(proto.Message): - r"""This message configures which resources and services to - monitor for availability. - - Attributes: - name (str): - A unique resource name for this Uptime check configuration. - The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] - - ``[PROJECT_ID_OR_NUMBER]`` is the Workspace host project - associated with the Uptime check. - - This field should be omitted when creating the Uptime check - configuration; on create, the resource name is assigned by - the server and included in the response. - display_name (str): - A human-friendly name for the Uptime check - configuration. The display name should be unique - within a Stackdriver Workspace in order to make - it easier to identify; however, uniqueness is - not enforced. Required. - monitored_resource (google.api.monitored_resource_pb2.MonitoredResource): - The `monitored - resource `__ - associated with the configuration. The following monitored - resource types are valid for this field: ``uptime_url``, - ``gce_instance``, ``gae_app``, ``aws_ec2_instance``, - ``aws_elb_load_balancer`` ``k8s_service`` - resource_group (google.cloud.monitoring_v3.types.UptimeCheckConfig.ResourceGroup): - The group resource associated with the - configuration. - http_check (google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck): - Contains information needed to make an HTTP - or HTTPS check. - tcp_check (google.cloud.monitoring_v3.types.UptimeCheckConfig.TcpCheck): - Contains information needed to make a TCP - check. - period (google.protobuf.duration_pb2.Duration): - How often, in seconds, the Uptime check is performed. - Currently, the only supported values are ``60s`` (1 minute), - ``300s`` (5 minutes), ``600s`` (10 minutes), and ``900s`` - (15 minutes). Optional, defaults to ``60s``. - timeout (google.protobuf.duration_pb2.Duration): - The maximum amount of time to wait for the - request to complete (must be between 1 and 60 - seconds). Required. - content_matchers (Sequence[google.cloud.monitoring_v3.types.UptimeCheckConfig.ContentMatcher]): - The content that is expected to appear in the data returned - by the target server against which the check is run. - Currently, only the first entry in the ``content_matchers`` - list is supported, and additional entries will be ignored. - This field is optional and should only be specified if a - content match is required as part of the/ Uptime check. - selected_regions (Sequence[google.cloud.monitoring_v3.types.UptimeCheckRegion]): - The list of regions from which the check will - be run. Some regions contain one location, and - others contain more than one. If this field is - specified, enough regions must be provided to - include a minimum of 3 locations. Not - specifying this field will result in Uptime - checks running from all available regions. - is_internal (bool): - If this is ``true``, then checks are made only from the - 'internal_checkers'. If it is ``false``, then checks are - made only from the 'selected_regions'. 
It is an error to - provide 'selected_regions' when is_internal is ``true``, or - to provide 'internal_checkers' when is_internal is - ``false``. - internal_checkers (Sequence[google.cloud.monitoring_v3.types.InternalChecker]): - The internal checkers that this check will egress from. If - ``is_internal`` is ``true`` and this list is empty, the - check will egress from all the InternalCheckers configured - for the project that owns this ``UptimeCheckConfig``. - """ - - class ResourceGroup(proto.Message): - r"""The resource submessage for group checks. It can be used - instead of a monitored resource, when multiple resources are - being monitored. - - Attributes: - group_id (str): - The group of resources being monitored. Should be only the - ``[GROUP_ID]``, and not the full-path - ``projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID]``. - resource_type (google.cloud.monitoring_v3.types.GroupResourceType): - The resource type of the group members. - """ - - group_id = proto.Field( - proto.STRING, - number=1, - ) - resource_type = proto.Field( - proto.ENUM, - number=2, - enum='GroupResourceType', - ) - - class HttpCheck(proto.Message): - r"""Information involved in an HTTP/HTTPS Uptime check request. - Attributes: - request_method (google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck.RequestMethod): - The HTTP request method to use for the check. If set to - ``METHOD_UNSPECIFIED`` then ``request_method`` defaults to - ``GET``. - use_ssl (bool): - If ``true``, use HTTPS instead of HTTP to run the check. - path (str): - Optional (defaults to "/"). The path to the page against - which to run the check. Will be combined with the ``host`` - (specified within the ``monitored_resource``) and ``port`` - to construct the full URL. If the provided path does not - begin with "/", a "/" will be prepended automatically. - port (int): - Optional (defaults to 80 when ``use_ssl`` is ``false``, and - 443 when ``use_ssl`` is ``true``). The TCP port on the HTTP - server against which to run the check. Will be combined with - host (specified within the ``monitored_resource``) and - ``path`` to construct the full URL. - auth_info (google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck.BasicAuthentication): - The authentication information. Optional when - creating an HTTP check; defaults to empty. - mask_headers (bool): - Boolean specifying whether to encrypt the header - information. Encryption should be specified for any headers - related to authentication that you do not wish to be seen - when retrieving the configuration. The server will be - responsible for encrypting the headers. On Get/List calls, - if ``mask_headers`` is set to ``true`` then the headers will - be obscured with ``******.`` - headers (Sequence[google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck.HeadersEntry]): - The list of headers to send as part of the - Uptime check request. If two headers have the - same key and different values, they should be - entered as a single header, with the value being - a comma-separated list of all the desired values - as described at - https://www.w3.org/Protocols/rfc2616/rfc2616.txt - (page 31). Entering two separate headers with - the same key in a Create call will cause the - first to be overwritten by the second. The - maximum number of headers allowed is 100. - content_type (google.cloud.monitoring_v3.types.UptimeCheckConfig.HttpCheck.ContentType): - The content type header to use for the check. The following - configurations result in errors: - - 1. 
Content type is specified in both the ``headers`` field - and the ``content_type`` field. - 2. Request method is ``GET`` and ``content_type`` is not - ``TYPE_UNSPECIFIED`` - 3. Request method is ``POST`` and ``content_type`` is - ``TYPE_UNSPECIFIED``. - 4. Request method is ``POST`` and a "Content-Type" header is - provided via ``headers`` field. The ``content_type`` - field should be used instead. - validate_ssl (bool): - Boolean specifying whether to include SSL certificate - validation as a part of the Uptime check. Only applies to - checks where ``monitored_resource`` is set to - ``uptime_url``. If ``use_ssl`` is ``false``, setting - ``validate_ssl`` to ``true`` has no effect. - body (bytes): - The request body associated with the HTTP POST request. If - ``content_type`` is ``URL_ENCODED``, the body passed in must - be URL-encoded. Users can provide a ``Content-Length`` - header via the ``headers`` field or the API will do so. If - the ``request_method`` is ``GET`` and ``body`` is not empty, - the API will return an error. The maximum byte size is 1 - megabyte. Note: As with all ``bytes`` fields, JSON - representations are base64 encoded. e.g.: "foo=bar" in - URL-encoded form is "foo%3Dbar" and in base64 encoding is - "Zm9vJTI1M0RiYXI=". - """ - class RequestMethod(proto.Enum): - r"""The HTTP request method options.""" - METHOD_UNSPECIFIED = 0 - GET = 1 - POST = 2 - - class ContentType(proto.Enum): - r"""Header options corresponding to the content type of a HTTP - request body. - """ - TYPE_UNSPECIFIED = 0 - URL_ENCODED = 1 - - class BasicAuthentication(proto.Message): - r"""The authentication parameters to provide to the specified resource - or URL that requires a username and password. Currently, only `Basic - HTTP authentication `__ is - supported in Uptime checks. - - Attributes: - username (str): - The username to use when authenticating with - the HTTP server. - password (str): - The password to use when authenticating with - the HTTP server. - """ - - username = proto.Field( - proto.STRING, - number=1, - ) - password = proto.Field( - proto.STRING, - number=2, - ) - - request_method = proto.Field( - proto.ENUM, - number=8, - enum='UptimeCheckConfig.HttpCheck.RequestMethod', - ) - use_ssl = proto.Field( - proto.BOOL, - number=1, - ) - path = proto.Field( - proto.STRING, - number=2, - ) - port = proto.Field( - proto.INT32, - number=3, - ) - auth_info = proto.Field( - proto.MESSAGE, - number=4, - message='UptimeCheckConfig.HttpCheck.BasicAuthentication', - ) - mask_headers = proto.Field( - proto.BOOL, - number=5, - ) - headers = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - content_type = proto.Field( - proto.ENUM, - number=9, - enum='UptimeCheckConfig.HttpCheck.ContentType', - ) - validate_ssl = proto.Field( - proto.BOOL, - number=7, - ) - body = proto.Field( - proto.BYTES, - number=10, - ) - - class TcpCheck(proto.Message): - r"""Information required for a TCP Uptime check request. - Attributes: - port (int): - The TCP port on the server against which to run the check. - Will be combined with host (specified within the - ``monitored_resource``) to construct the full URL. Required. - """ - - port = proto.Field( - proto.INT32, - number=1, - ) - - class ContentMatcher(proto.Message): - r"""Optional. Used to perform content matching. This allows - matching based on substrings and regular expressions, together - with their negations. 
Only the first 4 MB of an HTTP or - HTTPS check's response (and the first 1 MB of a TCP check's - response) are examined for purposes of content matching. - - Attributes: - content (str): - String or regex content to match. Maximum 1024 bytes. An - empty ``content`` string indicates no content matching is to - be performed. - matcher (google.cloud.monitoring_v3.types.UptimeCheckConfig.ContentMatcher.ContentMatcherOption): - The type of content matcher that will be applied to the - server output, compared to the ``content`` string when the - check is run. - """ - class ContentMatcherOption(proto.Enum): - r"""Options to perform content matching.""" - CONTENT_MATCHER_OPTION_UNSPECIFIED = 0 - CONTAINS_STRING = 1 - NOT_CONTAINS_STRING = 2 - MATCHES_REGEX = 3 - NOT_MATCHES_REGEX = 4 - - content = proto.Field( - proto.STRING, - number=1, - ) - matcher = proto.Field( - proto.ENUM, - number=2, - enum='UptimeCheckConfig.ContentMatcher.ContentMatcherOption', - ) - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - monitored_resource = proto.Field( - proto.MESSAGE, - number=3, - oneof='resource', - message=monitored_resource_pb2.MonitoredResource, - ) - resource_group = proto.Field( - proto.MESSAGE, - number=4, - oneof='resource', - message=ResourceGroup, - ) - http_check = proto.Field( - proto.MESSAGE, - number=5, - oneof='check_request_type', - message=HttpCheck, - ) - tcp_check = proto.Field( - proto.MESSAGE, - number=6, - oneof='check_request_type', - message=TcpCheck, - ) - period = proto.Field( - proto.MESSAGE, - number=7, - message=duration_pb2.Duration, - ) - timeout = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - content_matchers = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=ContentMatcher, - ) - selected_regions = proto.RepeatedField( - proto.ENUM, - number=10, - enum='UptimeCheckRegion', - ) - is_internal = proto.Field( - proto.BOOL, - number=15, - ) - internal_checkers = proto.RepeatedField( - proto.MESSAGE, - number=14, - message='InternalChecker', - ) - - -class UptimeCheckIp(proto.Message): - r"""Contains the region, location, and list of IP - addresses where checkers in the location run from. - - Attributes: - region (google.cloud.monitoring_v3.types.UptimeCheckRegion): - A broad region category in which the IP - address is located. - location (str): - A more specific location within the region - that typically encodes a particular - city/town/metro (and its containing - state/province or country) within the broader - umbrella region category. - ip_address (str): - The IP address from which the Uptime check - originates. This is a fully specified IP address - (not an IP address range). Most IP addresses, as - of this publication, are in IPv4 format; - however, one should not rely on the IP addresses - being in IPv4 format indefinitely, and should - support interpreting this field in either IPv4 - or IPv6 format. 
- """ - - region = proto.Field( - proto.ENUM, - number=1, - enum='UptimeCheckRegion', - ) - location = proto.Field( - proto.STRING, - number=2, - ) - ip_address = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime_service.py b/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime_service.py deleted file mode 100644 index 2a1fbf90..00000000 --- a/owl-bot-staging/v3/google/cloud/monitoring_v3/types/uptime_service.py +++ /dev/null @@ -1,269 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.monitoring_v3.types import uptime -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.monitoring.v3', - manifest={ - 'ListUptimeCheckConfigsRequest', - 'ListUptimeCheckConfigsResponse', - 'GetUptimeCheckConfigRequest', - 'CreateUptimeCheckConfigRequest', - 'UpdateUptimeCheckConfigRequest', - 'DeleteUptimeCheckConfigRequest', - 'ListUptimeCheckIpsRequest', - 'ListUptimeCheckIpsResponse', - }, -) - - -class ListUptimeCheckConfigsRequest(proto.Message): - r"""The protocol for the ``ListUptimeCheckConfigs`` request. - Attributes: - parent (str): - Required. The - `project `__ - whose Uptime check configurations are listed. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - page_size (int): - The maximum number of results to return in a single - response. The server may further constrain the maximum - number of results returned in a single page. If the - page_size is <=0, the server will decide the number of - results to be returned. - page_token (str): - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return more - results from the previous method call. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListUptimeCheckConfigsResponse(proto.Message): - r"""The protocol for the ``ListUptimeCheckConfigs`` response. - Attributes: - uptime_check_configs (Sequence[google.cloud.monitoring_v3.types.UptimeCheckConfig]): - The returned Uptime check configurations. - next_page_token (str): - This field represents the pagination token to retrieve the - next page of results. If the value is empty, it means no - further results for the request. To retrieve the next page - of results, the value of the next_page_token is passed to - the subsequent List method call (in the request message's - page_token field). - total_size (int): - The total number of Uptime check - configurations for the project, irrespective of - any pagination. 
- """ - - @property - def raw_page(self): - return self - - uptime_check_configs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=uptime.UptimeCheckConfig, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - total_size = proto.Field( - proto.INT32, - number=3, - ) - - -class GetUptimeCheckConfigRequest(proto.Message): - r"""The protocol for the ``GetUptimeCheckConfig`` request. - Attributes: - name (str): - Required. The Uptime check configuration to retrieve. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateUptimeCheckConfigRequest(proto.Message): - r"""The protocol for the ``CreateUptimeCheckConfig`` request. - Attributes: - parent (str): - Required. The - `project `__ - in which to create the Uptime check. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - uptime_check_config (google.cloud.monitoring_v3.types.UptimeCheckConfig): - Required. The new Uptime check configuration. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - uptime_check_config = proto.Field( - proto.MESSAGE, - number=2, - message=uptime.UptimeCheckConfig, - ) - - -class UpdateUptimeCheckConfigRequest(proto.Message): - r"""The protocol for the ``UpdateUptimeCheckConfig`` request. - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. If present, only the listed fields - in the current Uptime check configuration are - updated with values from the new configuration. - If this field is empty, then the current - configuration is completely replaced with the - new configuration. - uptime_check_config (google.cloud.monitoring_v3.types.UptimeCheckConfig): - Required. If an ``updateMask`` has been specified, this - field gives the values for the set of fields mentioned in - the ``updateMask``. If an ``updateMask`` has not been given, - this Uptime check configuration replaces the current - configuration. If a field is mentioned in ``updateMask`` but - the corresonding field is omitted in this partial Uptime - check configuration, it has the effect of deleting/clearing - the field from the configuration on the server. - - The following fields can be updated: ``display_name``, - ``http_check``, ``tcp_check``, ``timeout``, - ``content_matchers``, and ``selected_regions``. - """ - - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - uptime_check_config = proto.Field( - proto.MESSAGE, - number=3, - message=uptime.UptimeCheckConfig, - ) - - -class DeleteUptimeCheckConfigRequest(proto.Message): - r"""The protocol for the ``DeleteUptimeCheckConfig`` request. - Attributes: - name (str): - Required. The Uptime check configuration to delete. The - format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListUptimeCheckIpsRequest(proto.Message): - r"""The protocol for the ``ListUptimeCheckIps`` request. - Attributes: - page_size (int): - The maximum number of results to return in a single - response. The server may further constrain the maximum - number of results returned in a single page. If the - page_size is <=0, the server will decide the number of - results to be returned. NOTE: this field is not yet - implemented - page_token (str): - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. 
Using this field causes the method to return more - results from the previous method call. NOTE: this field is - not yet implemented - """ - - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class ListUptimeCheckIpsResponse(proto.Message): - r"""The protocol for the ``ListUptimeCheckIps`` response. - Attributes: - uptime_check_ips (Sequence[google.cloud.monitoring_v3.types.UptimeCheckIp]): - The returned list of IP addresses (including - region and location) that the checkers run from. - next_page_token (str): - This field represents the pagination token to retrieve the - next page of results. If the value is empty, it means no - further results for the request. To retrieve the next page - of results, the value of the next_page_token is passed to - the subsequent List method call (in the request message's - page_token field). NOTE: this field is not yet implemented - """ - - @property - def raw_page(self): - return self - - uptime_check_ips = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=uptime.UptimeCheckIp, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v3/mypy.ini b/owl-bot-staging/v3/mypy.ini deleted file mode 100644 index 4505b485..00000000 --- a/owl-bot-staging/v3/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/v3/noxfile.py b/owl-bot-staging/v3/noxfile.py deleted file mode 100644 index 8fd2eb88..00000000 --- a/owl-bot-staging/v3/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/monitoring_v3/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.7') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.6') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/v3/scripts/fixup_monitoring_v3_keywords.py b/owl-bot-staging/v3/scripts/fixup_monitoring_v3_keywords.py deleted file mode 100644 index e2608d8a..00000000 --- a/owl-bot-staging/v3/scripts/fixup_monitoring_v3_keywords.py +++ /dev/null @@ -1,221 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class monitoringCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_alert_policy': ('name', 'alert_policy', ), - 'create_group': ('name', 'group', 'validate_only', ), - 'create_metric_descriptor': ('name', 'metric_descriptor', ), - 'create_notification_channel': ('name', 'notification_channel', ), - 'create_service': ('parent', 'service', 'service_id', ), - 'create_service_level_objective': ('parent', 'service_level_objective', 'service_level_objective_id', ), - 'create_time_series': ('name', 'time_series', ), - 'create_uptime_check_config': ('parent', 'uptime_check_config', ), - 'delete_alert_policy': ('name', ), - 'delete_group': ('name', 'recursive', ), - 'delete_metric_descriptor': ('name', ), - 'delete_notification_channel': ('name', 'force', ), - 'delete_service': ('name', ), - 'delete_service_level_objective': ('name', ), - 'delete_uptime_check_config': ('name', ), - 'get_alert_policy': ('name', ), - 'get_group': ('name', ), - 'get_metric_descriptor': ('name', ), - 'get_monitored_resource_descriptor': ('name', ), - 'get_notification_channel': ('name', ), - 'get_notification_channel_descriptor': ('name', ), - 'get_notification_channel_verification_code': ('name', 'expire_time', ), - 'get_service': ('name', ), - 'get_service_level_objective': ('name', 'view', ), - 'get_uptime_check_config': ('name', ), - 'list_alert_policies': ('name', 'filter', 'order_by', 'page_size', 'page_token', ), - 'list_group_members': ('name', 'page_size', 'page_token', 'filter', 'interval', ), - 'list_groups': ('name', 'children_of_group', 'ancestors_of_group', 'descendants_of_group', 'page_size', 'page_token', ), - 'list_metric_descriptors': ('name', 'filter', 'page_size', 'page_token', ), - 'list_monitored_resource_descriptors': ('name', 'filter', 'page_size', 'page_token', ), - 'list_notification_channel_descriptors': ('name', 'page_size', 'page_token', ), - 'list_notification_channels': ('name', 'filter', 'order_by', 'page_size', 'page_token', ), - 'list_service_level_objectives': ('parent', 'filter', 'page_size', 'page_token', 'view', ), - 'list_services': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_time_series': ('name', 'filter', 'interval', 'view', 'aggregation', 'secondary_aggregation', 'order_by', 'page_size', 'page_token', ), - 'list_uptime_check_configs': ('parent', 'page_size', 'page_token', ), - 'list_uptime_check_ips': ('page_size', 'page_token', ), - 'query_time_series': ('name', 'query', 'page_size', 'page_token', ), - 'send_notification_channel_verification_code': ('name', ), - 'update_alert_policy': ('alert_policy', 'update_mask', ), - 'update_group': ('group', 'validate_only', ), - 'update_notification_channel': ('notification_channel', 'update_mask', ), - 'update_service': ('service', 'update_mask', ), - 'update_service_level_objective': ('service_level_objective', 'update_mask', ), - 'update_uptime_check_config': ('uptime_check_config', 'update_mask', ), - 'verify_notification_channel': ('name', 'code', ), - } - - def 
leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=monitoringCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the monitoring client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v3/setup.py b/owl-bot-staging/v3/setup.py deleted file mode 100644 index 375a4601..00000000 --- a/owl-bot-staging/v3/setup.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-monitoring', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/v3/tests/__init__.py b/owl-bot-staging/v3/tests/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v3/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v3/tests/unit/__init__.py b/owl-bot-staging/v3/tests/unit/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v3/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v3/tests/unit/gapic/__init__.py b/owl-bot-staging/v3/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v3/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/__init__.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py deleted file mode 100644 index d1966f81..00000000 --- a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py +++ /dev/null @@ -1,2365 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.monitoring_v3.services.alert_policy_service import AlertPolicyServiceAsyncClient -from google.cloud.monitoring_v3.services.alert_policy_service import AlertPolicyServiceClient -from google.cloud.monitoring_v3.services.alert_policy_service import pagers -from google.cloud.monitoring_v3.services.alert_policy_service import transports -from google.cloud.monitoring_v3.services.alert_policy_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.monitoring_v3.types import alert -from google.cloud.monitoring_v3.types import alert_service -from google.cloud.monitoring_v3.types import common -from google.cloud.monitoring_v3.types import mutation_record -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AlertPolicyServiceClient._get_default_mtls_endpoint(None) is None - assert AlertPolicyServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AlertPolicyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AlertPolicyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AlertPolicyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AlertPolicyServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - AlertPolicyServiceClient, - AlertPolicyServiceAsyncClient, -]) -def test_alert_policy_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AlertPolicyServiceGrpcTransport, "grpc"), - (transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_alert_policy_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - AlertPolicyServiceClient, - AlertPolicyServiceAsyncClient, -]) -def test_alert_policy_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_alert_policy_service_client_get_transport_class(): - transport = AlertPolicyServiceClient.get_transport_class() - available_transports = [ - transports.AlertPolicyServiceGrpcTransport, - ] - assert transport in available_transports - - transport = 
AlertPolicyServiceClient.get_transport_class("grpc") - assert transport == transports.AlertPolicyServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc"), - (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(AlertPolicyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlertPolicyServiceClient)) -@mock.patch.object(AlertPolicyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlertPolicyServiceAsyncClient)) -def test_alert_policy_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AlertPolicyServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AlertPolicyServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc", "true"), - (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc", "false"), - (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(AlertPolicyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlertPolicyServiceClient)) -@mock.patch.object(AlertPolicyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlertPolicyServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_alert_policy_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc"), - (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_alert_policy_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AlertPolicyServiceClient, transports.AlertPolicyServiceGrpcTransport, "grpc"), - (AlertPolicyServiceAsyncClient, transports.AlertPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_alert_policy_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_alert_policy_service_client_client_options_from_dict(): - with mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AlertPolicyServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_list_alert_policies(transport: str = 'grpc', request_type=alert_service.ListAlertPoliciesRequest): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert_service.ListAlertPoliciesResponse( - next_page_token='next_page_token_value', - total_size=1086, - ) - response = client.list_alert_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.ListAlertPoliciesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAlertPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.total_size == 1086 - - -def test_list_alert_policies_from_dict(): - test_list_alert_policies(request_type=dict) - - -def test_list_alert_policies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__') as call: - client.list_alert_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.ListAlertPoliciesRequest() - - -@pytest.mark.asyncio -async def test_list_alert_policies_async(transport: str = 'grpc_asyncio', request_type=alert_service.ListAlertPoliciesRequest): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(alert_service.ListAlertPoliciesResponse( - next_page_token='next_page_token_value', - total_size=1086, - )) - response = await client.list_alert_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.ListAlertPoliciesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAlertPoliciesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.total_size == 1086 - - -@pytest.mark.asyncio -async def test_list_alert_policies_async_from_dict(): - await test_list_alert_policies_async(request_type=dict) - - -def test_list_alert_policies_field_headers(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.ListAlertPoliciesRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__') as call: - call.return_value = alert_service.ListAlertPoliciesResponse() - client.list_alert_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_alert_policies_field_headers_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.ListAlertPoliciesRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert_service.ListAlertPoliciesResponse()) - await client.list_alert_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_list_alert_policies_flattened(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert_service.ListAlertPoliciesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_alert_policies( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_list_alert_policies_flattened_error(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_alert_policies( - alert_service.ListAlertPoliciesRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_list_alert_policies_flattened_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert_service.ListAlertPoliciesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert_service.ListAlertPoliciesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_alert_policies( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_list_alert_policies_flattened_error_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_alert_policies( - alert_service.ListAlertPoliciesRequest(), - name='name_value', - ) - - -def test_list_alert_policies_pager(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - alert.AlertPolicy(), - alert.AlertPolicy(), - ], - next_page_token='abc', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[], - next_page_token='def', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - ], - next_page_token='ghi', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - alert.AlertPolicy(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.list_alert_policies(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, alert.AlertPolicy) - for i in results) - -def test_list_alert_policies_pages(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - alert.AlertPolicy(), - alert.AlertPolicy(), - ], - next_page_token='abc', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[], - next_page_token='def', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - ], - next_page_token='ghi', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - alert.AlertPolicy(), - ], - ), - RuntimeError, - ) - pages = list(client.list_alert_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_alert_policies_async_pager(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - alert.AlertPolicy(), - alert.AlertPolicy(), - ], - next_page_token='abc', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[], - next_page_token='def', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - ], - next_page_token='ghi', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - alert.AlertPolicy(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_alert_policies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, alert.AlertPolicy) - for i in responses) - -@pytest.mark.asyncio -async def test_list_alert_policies_async_pages(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_alert_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - alert.AlertPolicy(), - alert.AlertPolicy(), - ], - next_page_token='abc', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[], - next_page_token='def', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - ], - next_page_token='ghi', - ), - alert_service.ListAlertPoliciesResponse( - alert_policies=[ - alert.AlertPolicy(), - alert.AlertPolicy(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_alert_policies(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_alert_policy(transport: str = 'grpc', request_type=alert_service.GetAlertPolicyRequest): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert.AlertPolicy( - name='name_value', - display_name='display_name_value', - combiner=alert.AlertPolicy.ConditionCombinerType.AND, - notification_channels=['notification_channels_value'], - ) - response = client.get_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.GetAlertPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, alert.AlertPolicy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND - assert response.notification_channels == ['notification_channels_value'] - - -def test_get_alert_policy_from_dict(): - test_get_alert_policy(request_type=dict) - - -def test_get_alert_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_alert_policy), - '__call__') as call: - client.get_alert_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.GetAlertPolicyRequest() - - -@pytest.mark.asyncio -async def test_get_alert_policy_async(transport: str = 'grpc_asyncio', request_type=alert_service.GetAlertPolicyRequest): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy( - name='name_value', - display_name='display_name_value', - combiner=alert.AlertPolicy.ConditionCombinerType.AND, - notification_channels=['notification_channels_value'], - )) - response = await client.get_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.GetAlertPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, alert.AlertPolicy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND - assert response.notification_channels == ['notification_channels_value'] - - -@pytest.mark.asyncio -async def test_get_alert_policy_async_from_dict(): - await test_get_alert_policy_async(request_type=dict) - - -def test_get_alert_policy_field_headers(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.GetAlertPolicyRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_alert_policy), - '__call__') as call: - call.return_value = alert.AlertPolicy() - client.get_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_alert_policy_field_headers_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.GetAlertPolicyRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_alert_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) - await client.get_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_alert_policy_flattened(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert.AlertPolicy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_alert_policy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_alert_policy_flattened_error(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_alert_policy( - alert_service.GetAlertPolicyRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_alert_policy_flattened_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert.AlertPolicy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_alert_policy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_alert_policy_flattened_error_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_alert_policy( - alert_service.GetAlertPolicyRequest(), - name='name_value', - ) - - -def test_create_alert_policy(transport: str = 'grpc', request_type=alert_service.CreateAlertPolicyRequest): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert.AlertPolicy( - name='name_value', - display_name='display_name_value', - combiner=alert.AlertPolicy.ConditionCombinerType.AND, - notification_channels=['notification_channels_value'], - ) - response = client.create_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.CreateAlertPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, alert.AlertPolicy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND - assert response.notification_channels == ['notification_channels_value'] - - -def test_create_alert_policy_from_dict(): - test_create_alert_policy(request_type=dict) - - -def test_create_alert_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_alert_policy), - '__call__') as call: - client.create_alert_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.CreateAlertPolicyRequest() - - -@pytest.mark.asyncio -async def test_create_alert_policy_async(transport: str = 'grpc_asyncio', request_type=alert_service.CreateAlertPolicyRequest): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy( - name='name_value', - display_name='display_name_value', - combiner=alert.AlertPolicy.ConditionCombinerType.AND, - notification_channels=['notification_channels_value'], - )) - response = await client.create_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.CreateAlertPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, alert.AlertPolicy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND - assert response.notification_channels == ['notification_channels_value'] - - -@pytest.mark.asyncio -async def test_create_alert_policy_async_from_dict(): - await test_create_alert_policy_async(request_type=dict) - - -def test_create_alert_policy_field_headers(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.CreateAlertPolicyRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_alert_policy), - '__call__') as call: - call.return_value = alert.AlertPolicy() - client.create_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_alert_policy_field_headers_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.CreateAlertPolicyRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_alert_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) - await client.create_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_create_alert_policy_flattened(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert.AlertPolicy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_alert_policy( - name='name_value', - alert_policy=alert.AlertPolicy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].alert_policy == alert.AlertPolicy(name='name_value') - - -def test_create_alert_policy_flattened_error(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_alert_policy( - alert_service.CreateAlertPolicyRequest(), - name='name_value', - alert_policy=alert.AlertPolicy(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_alert_policy_flattened_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert.AlertPolicy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_alert_policy( - name='name_value', - alert_policy=alert.AlertPolicy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].alert_policy == alert.AlertPolicy(name='name_value') - - -@pytest.mark.asyncio -async def test_create_alert_policy_flattened_error_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_alert_policy( - alert_service.CreateAlertPolicyRequest(), - name='name_value', - alert_policy=alert.AlertPolicy(name='name_value'), - ) - - -def test_delete_alert_policy(transport: str = 'grpc', request_type=alert_service.DeleteAlertPolicyRequest): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.DeleteAlertPolicyRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_alert_policy_from_dict(): - test_delete_alert_policy(request_type=dict) - - -def test_delete_alert_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_alert_policy), - '__call__') as call: - client.delete_alert_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.DeleteAlertPolicyRequest() - - -@pytest.mark.asyncio -async def test_delete_alert_policy_async(transport: str = 'grpc_asyncio', request_type=alert_service.DeleteAlertPolicyRequest): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.DeleteAlertPolicyRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_alert_policy_async_from_dict(): - await test_delete_alert_policy_async(request_type=dict) - - -def test_delete_alert_policy_field_headers(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.DeleteAlertPolicyRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_alert_policy), - '__call__') as call: - call.return_value = None - client.delete_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_alert_policy_field_headers_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.DeleteAlertPolicyRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_alert_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_alert_policy_flattened(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_alert_policy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_alert_policy_flattened_error(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_alert_policy( - alert_service.DeleteAlertPolicyRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_alert_policy_flattened_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_alert_policy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_alert_policy_flattened_error_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_alert_policy( - alert_service.DeleteAlertPolicyRequest(), - name='name_value', - ) - - -def test_update_alert_policy(transport: str = 'grpc', request_type=alert_service.UpdateAlertPolicyRequest): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert.AlertPolicy( - name='name_value', - display_name='display_name_value', - combiner=alert.AlertPolicy.ConditionCombinerType.AND, - notification_channels=['notification_channels_value'], - ) - response = client.update_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.UpdateAlertPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, alert.AlertPolicy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND - assert response.notification_channels == ['notification_channels_value'] - - -def test_update_alert_policy_from_dict(): - test_update_alert_policy(request_type=dict) - - -def test_update_alert_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_alert_policy), - '__call__') as call: - client.update_alert_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.UpdateAlertPolicyRequest() - - -@pytest.mark.asyncio -async def test_update_alert_policy_async(transport: str = 'grpc_asyncio', request_type=alert_service.UpdateAlertPolicyRequest): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy( - name='name_value', - display_name='display_name_value', - combiner=alert.AlertPolicy.ConditionCombinerType.AND, - notification_channels=['notification_channels_value'], - )) - response = await client.update_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == alert_service.UpdateAlertPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, alert.AlertPolicy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND - assert response.notification_channels == ['notification_channels_value'] - - -@pytest.mark.asyncio -async def test_update_alert_policy_async_from_dict(): - await test_update_alert_policy_async(request_type=dict) - - -def test_update_alert_policy_field_headers(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.UpdateAlertPolicyRequest() - - request.alert_policy.name = 'alert_policy.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_alert_policy), - '__call__') as call: - call.return_value = alert.AlertPolicy() - client.update_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'alert_policy.name=alert_policy.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_alert_policy_field_headers_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = alert_service.UpdateAlertPolicyRequest() - - request.alert_policy.name = 'alert_policy.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_alert_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) - await client.update_alert_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'alert_policy.name=alert_policy.name/value', - ) in kw['metadata'] - - -def test_update_alert_policy_flattened(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert.AlertPolicy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.update_alert_policy( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - alert_policy=alert.AlertPolicy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - assert args[0].alert_policy == alert.AlertPolicy(name='name_value') - - -def test_update_alert_policy_flattened_error(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_alert_policy( - alert_service.UpdateAlertPolicyRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - alert_policy=alert.AlertPolicy(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_update_alert_policy_flattened_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_alert_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = alert.AlertPolicy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(alert.AlertPolicy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_alert_policy( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - alert_policy=alert.AlertPolicy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - assert args[0].alert_policy == alert.AlertPolicy(name='name_value') - - -@pytest.mark.asyncio -async def test_update_alert_policy_flattened_error_async(): - client = AlertPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_alert_policy( - alert_service.UpdateAlertPolicyRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - alert_policy=alert.AlertPolicy(name='name_value'), - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AlertPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.AlertPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AlertPolicyServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.AlertPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AlertPolicyServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AlertPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AlertPolicyServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AlertPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AlertPolicyServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AlertPolicyServiceGrpcTransport, - transports.AlertPolicyServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AlertPolicyServiceGrpcTransport, - ) - -def test_alert_policy_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AlertPolicyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_alert_policy_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AlertPolicyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'list_alert_policies', - 'get_alert_policy', - 'create_alert_policy', - 'delete_alert_policy', - 'update_alert_policy', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_alert_policy_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AlertPolicyServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_alert_policy_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AlertPolicyServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ), - quota_project_id="octopus", - ) - - -def test_alert_policy_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.alert_policy_service.transports.AlertPolicyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AlertPolicyServiceTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_alert_policy_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AlertPolicyServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_alert_policy_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AlertPolicyServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AlertPolicyServiceGrpcTransport, - transports.AlertPolicyServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_alert_policy_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AlertPolicyServiceGrpcTransport, - transports.AlertPolicyServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_alert_policy_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AlertPolicyServiceGrpcTransport, grpc_helpers), - (transports.AlertPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_alert_policy_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "monitoring.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - scopes=["1", "2"], - default_host="monitoring.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AlertPolicyServiceGrpcTransport, transports.AlertPolicyServiceGrpcAsyncIOTransport]) -def test_alert_policy_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_alert_policy_service_host_no_port(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), - ) - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_alert_policy_service_host_with_port(): - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), - ) - assert client.transport._host == 'monitoring.googleapis.com:8000' - -def test_alert_policy_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AlertPolicyServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_alert_policy_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.AlertPolicyServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AlertPolicyServiceGrpcTransport, transports.AlertPolicyServiceGrpcAsyncIOTransport]) -def test_alert_policy_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.AlertPolicyServiceGrpcTransport, transports.AlertPolicyServiceGrpcAsyncIOTransport]) -def test_alert_policy_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_alert_policy_path(): - project = "squid" - alert_policy = "clam" - expected = "projects/{project}/alertPolicies/{alert_policy}".format(project=project, alert_policy=alert_policy, ) - actual = AlertPolicyServiceClient.alert_policy_path(project, alert_policy) - assert expected == actual - - -def test_parse_alert_policy_path(): - expected = { - "project": "whelk", - "alert_policy": "octopus", - } - path = AlertPolicyServiceClient.alert_policy_path(**expected) - - # Check that the path construction is reversible. - actual = AlertPolicyServiceClient.parse_alert_policy_path(path) - assert expected == actual - -def test_alert_policy_condition_path(): - project = "oyster" - alert_policy = "nudibranch" - condition = "cuttlefish" - expected = "projects/{project}/alertPolicies/{alert_policy}/conditions/{condition}".format(project=project, alert_policy=alert_policy, condition=condition, ) - actual = AlertPolicyServiceClient.alert_policy_condition_path(project, alert_policy, condition) - assert expected == actual - - -def test_parse_alert_policy_condition_path(): - expected = { - "project": "mussel", - "alert_policy": "winkle", - "condition": "nautilus", - } - path = AlertPolicyServiceClient.alert_policy_condition_path(**expected) - - # Check that the path construction is reversible. - actual = AlertPolicyServiceClient.parse_alert_policy_condition_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AlertPolicyServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = AlertPolicyServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = AlertPolicyServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = AlertPolicyServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = AlertPolicyServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AlertPolicyServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AlertPolicyServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = AlertPolicyServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AlertPolicyServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = AlertPolicyServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = AlertPolicyServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AlertPolicyServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AlertPolicyServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = AlertPolicyServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AlertPolicyServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AlertPolicyServiceTransport, '_prep_wrapped_messages') as prep: - client = AlertPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AlertPolicyServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AlertPolicyServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_group_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_group_service.py deleted file mode 100644 index 4c21480d..00000000 --- a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_group_service.py +++ /dev/null @@ -1,2765 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api import monitored_resource_pb2 # type: ignore -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.monitoring_v3.services.group_service import GroupServiceAsyncClient -from google.cloud.monitoring_v3.services.group_service import GroupServiceClient -from google.cloud.monitoring_v3.services.group_service import pagers -from google.cloud.monitoring_v3.services.group_service import transports -from google.cloud.monitoring_v3.services.group_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.monitoring_v3.types import common -from google.cloud.monitoring_v3.types import group -from google.cloud.monitoring_v3.types import group as gm_group -from google.cloud.monitoring_v3.types import group_service -from google.oauth2 import service_account -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert GroupServiceClient._get_default_mtls_endpoint(None) is None - assert GroupServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert GroupServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert GroupServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert GroupServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert GroupServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - GroupServiceClient, - GroupServiceAsyncClient, -]) -def test_group_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.GroupServiceGrpcTransport, "grpc"), - (transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_group_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - GroupServiceClient, - GroupServiceAsyncClient, -]) -def test_group_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_group_service_client_get_transport_class(): - transport = GroupServiceClient.get_transport_class() - available_transports = [ - transports.GroupServiceGrpcTransport, - ] - assert transport in available_transports - - transport = GroupServiceClient.get_transport_class("grpc") - assert transport == transports.GroupServiceGrpcTransport - - 
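As a reading aid for the generated assertions above, here is a minimal, self-contained sketch of the two construction paths those tests exercise: building a client directly with credentials, and resolving a transport class by name. The anonymous credentials and the "grpc" literal are illustrative choices for the sketch, not part of the staged module itself.

# Illustrative sketch only: it mirrors the construction and transport-selection
# behaviour asserted by the generated tests above, under the assumption that
# anonymous credentials are acceptable for a local, offline example.
from google.auth import credentials as ga_credentials
from google.cloud.monitoring_v3.services.group_service import GroupServiceClient
from google.cloud.monitoring_v3.services.group_service import transports

# Anonymous credentials keep the sketch self-contained; real callers would
# normally rely on Application Default Credentials or a service account file.
client = GroupServiceClient(credentials=ga_credentials.AnonymousCredentials())
assert client.transport._host == 'monitoring.googleapis.com:443'

# Passing the transport name resolves to the synchronous gRPC transport class,
# exactly as the get_transport_class test above asserts.
assert GroupServiceClient.get_transport_class("grpc") == transports.GroupServiceGrpcTransport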
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc"), - (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(GroupServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GroupServiceClient)) -@mock.patch.object(GroupServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GroupServiceAsyncClient)) -def test_group_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(GroupServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(GroupServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc", "true"), - (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc", "false"), - (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(GroupServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GroupServiceClient)) -@mock.patch.object(GroupServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GroupServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_group_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc"), - (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_group_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (GroupServiceClient, transports.GroupServiceGrpcTransport, "grpc"), - (GroupServiceAsyncClient, transports.GroupServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_group_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_group_service_client_client_options_from_dict(): - with mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = GroupServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_list_groups(transport: str = 'grpc', request_type=group_service.ListGroupsRequest): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = group_service.ListGroupsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.ListGroupsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListGroupsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_groups_from_dict(): - test_list_groups(request_type=dict) - - -def test_list_groups_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__') as call: - client.list_groups() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.ListGroupsRequest() - - -@pytest.mark.asyncio -async def test_list_groups_async(transport: str = 'grpc_asyncio', request_type=group_service.ListGroupsRequest): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.ListGroupsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListGroupsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_groups_async_from_dict(): - await test_list_groups_async(request_type=dict) - - -def test_list_groups_field_headers(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.ListGroupsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__') as call: - call.return_value = group_service.ListGroupsResponse() - client.list_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_groups_field_headers_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.ListGroupsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupsResponse()) - await client.list_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_list_groups_flattened(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = group_service.ListGroupsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_groups( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_list_groups_flattened_error(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_groups( - group_service.ListGroupsRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_list_groups_flattened_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = group_service.ListGroupsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_groups( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_list_groups_flattened_error_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_groups( - group_service.ListGroupsRequest(), - name='name_value', - ) - - -def test_list_groups_pager(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - group_service.ListGroupsResponse( - group=[ - group.Group(), - group.Group(), - group.Group(), - ], - next_page_token='abc', - ), - group_service.ListGroupsResponse( - group=[], - next_page_token='def', - ), - group_service.ListGroupsResponse( - group=[ - group.Group(), - ], - next_page_token='ghi', - ), - group_service.ListGroupsResponse( - group=[ - group.Group(), - group.Group(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.list_groups(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, group.Group) - for i in results) - -def test_list_groups_pages(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - group_service.ListGroupsResponse( - group=[ - group.Group(), - group.Group(), - group.Group(), - ], - next_page_token='abc', - ), - group_service.ListGroupsResponse( - group=[], - next_page_token='def', - ), - group_service.ListGroupsResponse( - group=[ - group.Group(), - ], - next_page_token='ghi', - ), - group_service.ListGroupsResponse( - group=[ - group.Group(), - group.Group(), - ], - ), - RuntimeError, - ) - pages = list(client.list_groups(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_groups_async_pager(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - group_service.ListGroupsResponse( - group=[ - group.Group(), - group.Group(), - group.Group(), - ], - next_page_token='abc', - ), - group_service.ListGroupsResponse( - group=[], - next_page_token='def', - ), - group_service.ListGroupsResponse( - group=[ - group.Group(), - ], - next_page_token='ghi', - ), - group_service.ListGroupsResponse( - group=[ - group.Group(), - group.Group(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_groups(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, group.Group) - for i in responses) - -@pytest.mark.asyncio -async def test_list_groups_async_pages(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - group_service.ListGroupsResponse( - group=[ - group.Group(), - group.Group(), - group.Group(), - ], - next_page_token='abc', - ), - group_service.ListGroupsResponse( - group=[], - next_page_token='def', - ), - group_service.ListGroupsResponse( - group=[ - group.Group(), - ], - next_page_token='ghi', - ), - group_service.ListGroupsResponse( - group=[ - group.Group(), - group.Group(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_groups(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_group(transport: str = 'grpc', request_type=group_service.GetGroupRequest): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_group), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = group.Group( - name='name_value', - display_name='display_name_value', - parent_name='parent_name_value', - filter='filter_value', - is_cluster=True, - ) - response = client.get_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.GetGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, group.Group) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.parent_name == 'parent_name_value' - assert response.filter == 'filter_value' - assert response.is_cluster is True - - -def test_get_group_from_dict(): - test_get_group(request_type=dict) - - -def test_get_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_group), - '__call__') as call: - client.get_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.GetGroupRequest() - - -@pytest.mark.asyncio -async def test_get_group_async(transport: str = 'grpc_asyncio', request_type=group_service.GetGroupRequest): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(group.Group( - name='name_value', - display_name='display_name_value', - parent_name='parent_name_value', - filter='filter_value', - is_cluster=True, - )) - response = await client.get_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.GetGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, group.Group) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.parent_name == 'parent_name_value' - assert response.filter == 'filter_value' - assert response.is_cluster is True - - -@pytest.mark.asyncio -async def test_get_group_async_from_dict(): - await test_get_group_async(request_type=dict) - - -def test_get_group_field_headers(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.GetGroupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_group), - '__call__') as call: - call.return_value = group.Group() - client.get_group(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_group_field_headers_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.GetGroupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group.Group()) - await client.get_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_group_flattened(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = group.Group() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_group_flattened_error(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_group( - group_service.GetGroupRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_group_flattened_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = group.Group() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group.Group()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_group_flattened_error_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_group( - group_service.GetGroupRequest(), - name='name_value', - ) - - -def test_create_group(transport: str = 'grpc', request_type=group_service.CreateGroupRequest): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_group.Group( - name='name_value', - display_name='display_name_value', - parent_name='parent_name_value', - filter='filter_value', - is_cluster=True, - ) - response = client.create_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.CreateGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gm_group.Group) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.parent_name == 'parent_name_value' - assert response.filter == 'filter_value' - assert response.is_cluster is True - - -def test_create_group_from_dict(): - test_create_group(request_type=dict) - - -def test_create_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_group), - '__call__') as call: - client.create_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.CreateGroupRequest() - - -@pytest.mark.asyncio -async def test_create_group_async(transport: str = 'grpc_asyncio', request_type=group_service.CreateGroupRequest): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group( - name='name_value', - display_name='display_name_value', - parent_name='parent_name_value', - filter='filter_value', - is_cluster=True, - )) - response = await client.create_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.CreateGroupRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gm_group.Group) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.parent_name == 'parent_name_value' - assert response.filter == 'filter_value' - assert response.is_cluster is True - - -@pytest.mark.asyncio -async def test_create_group_async_from_dict(): - await test_create_group_async(request_type=dict) - - -def test_create_group_field_headers(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.CreateGroupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_group), - '__call__') as call: - call.return_value = gm_group.Group() - client.create_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_group_field_headers_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.CreateGroupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group()) - await client.create_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_create_group_flattened(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_group.Group() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_group( - name='name_value', - group=gm_group.Group(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].group == gm_group.Group(name='name_value') - - -def test_create_group_flattened_error(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_group( - group_service.CreateGroupRequest(), - name='name_value', - group=gm_group.Group(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_group_flattened_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_group.Group() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_group( - name='name_value', - group=gm_group.Group(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].group == gm_group.Group(name='name_value') - - -@pytest.mark.asyncio -async def test_create_group_flattened_error_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_group( - group_service.CreateGroupRequest(), - name='name_value', - group=gm_group.Group(name='name_value'), - ) - - -def test_update_group(transport: str = 'grpc', request_type=group_service.UpdateGroupRequest): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_group.Group( - name='name_value', - display_name='display_name_value', - parent_name='parent_name_value', - filter='filter_value', - is_cluster=True, - ) - response = client.update_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.UpdateGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gm_group.Group) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.parent_name == 'parent_name_value' - assert response.filter == 'filter_value' - assert response.is_cluster is True - - -def test_update_group_from_dict(): - test_update_group(request_type=dict) - - -def test_update_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_group), - '__call__') as call: - client.update_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.UpdateGroupRequest() - - -@pytest.mark.asyncio -async def test_update_group_async(transport: str = 'grpc_asyncio', request_type=group_service.UpdateGroupRequest): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group( - name='name_value', - display_name='display_name_value', - parent_name='parent_name_value', - filter='filter_value', - is_cluster=True, - )) - response = await client.update_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.UpdateGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gm_group.Group) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.parent_name == 'parent_name_value' - assert response.filter == 'filter_value' - assert response.is_cluster is True - - -@pytest.mark.asyncio -async def test_update_group_async_from_dict(): - await test_update_group_async(request_type=dict) - - -def test_update_group_field_headers(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.UpdateGroupRequest() - - request.group.name = 'group.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_group), - '__call__') as call: - call.return_value = gm_group.Group() - client.update_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'group.name=group.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_group_field_headers_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.UpdateGroupRequest() - - request.group.name = 'group.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group()) - await client.update_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'group.name=group.name/value', - ) in kw['metadata'] - - -def test_update_group_flattened(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_group.Group() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_group( - group=gm_group.Group(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].group == gm_group.Group(name='name_value') - - -def test_update_group_flattened_error(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_group( - group_service.UpdateGroupRequest(), - group=gm_group.Group(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_update_group_flattened_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_group.Group() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_group.Group()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_group( - group=gm_group.Group(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].group == gm_group.Group(name='name_value') - - -@pytest.mark.asyncio -async def test_update_group_flattened_error_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_group( - group_service.UpdateGroupRequest(), - group=gm_group.Group(name='name_value'), - ) - - -def test_delete_group(transport: str = 'grpc', request_type=group_service.DeleteGroupRequest): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.DeleteGroupRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_group_from_dict(): - test_delete_group(request_type=dict) - - -def test_delete_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_group), - '__call__') as call: - client.delete_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.DeleteGroupRequest() - - -@pytest.mark.asyncio -async def test_delete_group_async(transport: str = 'grpc_asyncio', request_type=group_service.DeleteGroupRequest): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.DeleteGroupRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_group_async_from_dict(): - await test_delete_group_async(request_type=dict) - - -def test_delete_group_field_headers(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.DeleteGroupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_group), - '__call__') as call: - call.return_value = None - client.delete_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_group_field_headers_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.DeleteGroupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_group_flattened(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_group_flattened_error(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_group( - group_service.DeleteGroupRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_group_flattened_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_group_flattened_error_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_group( - group_service.DeleteGroupRequest(), - name='name_value', - ) - - -def test_list_group_members(transport: str = 'grpc', request_type=group_service.ListGroupMembersRequest): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = group_service.ListGroupMembersResponse( - next_page_token='next_page_token_value', - total_size=1086, - ) - response = client.list_group_members(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.ListGroupMembersRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListGroupMembersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.total_size == 1086 - - -def test_list_group_members_from_dict(): - test_list_group_members(request_type=dict) - - -def test_list_group_members_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__') as call: - client.list_group_members() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.ListGroupMembersRequest() - - -@pytest.mark.asyncio -async def test_list_group_members_async(transport: str = 'grpc_asyncio', request_type=group_service.ListGroupMembersRequest): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupMembersResponse( - next_page_token='next_page_token_value', - total_size=1086, - )) - response = await client.list_group_members(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == group_service.ListGroupMembersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListGroupMembersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.total_size == 1086 - - -@pytest.mark.asyncio -async def test_list_group_members_async_from_dict(): - await test_list_group_members_async(request_type=dict) - - -def test_list_group_members_field_headers(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = group_service.ListGroupMembersRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__') as call: - call.return_value = group_service.ListGroupMembersResponse() - client.list_group_members(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_group_members_field_headers_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = group_service.ListGroupMembersRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupMembersResponse()) - await client.list_group_members(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_list_group_members_flattened(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = group_service.ListGroupMembersResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_group_members( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_list_group_members_flattened_error(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_group_members( - group_service.ListGroupMembersRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_list_group_members_flattened_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = group_service.ListGroupMembersResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(group_service.ListGroupMembersResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_group_members( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_list_group_members_flattened_error_async(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_group_members( - group_service.ListGroupMembersRequest(), - name='name_value', - ) - - -def test_list_group_members_pager(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_group_members), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - ], - next_page_token='abc', - ), - group_service.ListGroupMembersResponse( - members=[], - next_page_token='def', - ), - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - ], - next_page_token='ghi', - ), - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.list_group_members(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResource) - for i in results) - -def test_list_group_members_pages(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - ], - next_page_token='abc', - ), - group_service.ListGroupMembersResponse( - members=[], - next_page_token='def', - ), - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - ], - next_page_token='ghi', - ), - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - ], - ), - RuntimeError, - ) - pages = list(client.list_group_members(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_group_members_async_pager(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - ], - next_page_token='abc', - ), - group_service.ListGroupMembersResponse( - members=[], - next_page_token='def', - ), - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - ], - next_page_token='ghi', - ), - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_group_members(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResource) - for i in responses) - -@pytest.mark.asyncio -async def test_list_group_members_async_pages(): - client = GroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_group_members), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - ], - next_page_token='abc', - ), - group_service.ListGroupMembersResponse( - members=[], - next_page_token='def', - ), - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - ], - next_page_token='ghi', - ), - group_service.ListGroupMembersResponse( - members=[ - monitored_resource_pb2.MonitoredResource(), - monitored_resource_pb2.MonitoredResource(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_group_members(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.GroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.GroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = GroupServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.GroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = GroupServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.GroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = GroupServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.GroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.GroupServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.GroupServiceGrpcTransport, - transports.GroupServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.GroupServiceGrpcTransport, - ) - -def test_group_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.GroupServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_group_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.GroupServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'list_groups', - 'get_group', - 'create_group', - 'update_group', - 'delete_group', - 'list_group_members', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_group_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GroupServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_group_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GroupServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ), - quota_project_id="octopus", - ) - - -def test_group_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.group_service.transports.GroupServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GroupServiceTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_group_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GroupServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_group_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GroupServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.GroupServiceGrpcTransport, - transports.GroupServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_group_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.GroupServiceGrpcTransport, - transports.GroupServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_group_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.GroupServiceGrpcTransport, grpc_helpers), - (transports.GroupServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_group_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "monitoring.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - scopes=["1", "2"], - default_host="monitoring.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.GroupServiceGrpcTransport, transports.GroupServiceGrpcAsyncIOTransport]) -def test_group_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_group_service_host_no_port(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), - ) - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_group_service_host_with_port(): - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), - ) - assert client.transport._host == 'monitoring.googleapis.com:8000' - -def test_group_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.GroupServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_group_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.GroupServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.GroupServiceGrpcTransport, transports.GroupServiceGrpcAsyncIOTransport]) -def test_group_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.GroupServiceGrpcTransport, transports.GroupServiceGrpcAsyncIOTransport]) -def test_group_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_group_path(): - project = "squid" - group = "clam" - expected = "projects/{project}/groups/{group}".format(project=project, group=group, ) - actual = GroupServiceClient.group_path(project, group) - assert expected == actual - - -def test_parse_group_path(): - expected = { - "project": "whelk", - "group": "octopus", - } - path = GroupServiceClient.group_path(**expected) - - # Check that the path construction is reversible. 
- actual = GroupServiceClient.parse_group_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = GroupServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = GroupServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = GroupServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = GroupServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = GroupServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = GroupServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = GroupServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = GroupServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = GroupServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = GroupServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = GroupServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = GroupServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = GroupServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = GroupServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = GroupServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.GroupServiceTransport, '_prep_wrapped_messages') as prep: - client = GroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.GroupServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = GroupServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_metric_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_metric_service.py deleted file mode 100644 index ccb2f6d0..00000000 --- a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_metric_service.py +++ /dev/null @@ -1,3511 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api import distribution_pb2 # type: ignore -from google.api import label_pb2 # type: ignore -from google.api import launch_stage_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.api import monitored_resource_pb2 # type: ignore -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.monitoring_v3.services.metric_service import MetricServiceAsyncClient -from google.cloud.monitoring_v3.services.metric_service import MetricServiceClient -from google.cloud.monitoring_v3.services.metric_service import pagers -from google.cloud.monitoring_v3.services.metric_service import transports -from google.cloud.monitoring_v3.services.metric_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.monitoring_v3.types import common -from google.cloud.monitoring_v3.types import metric as gm_metric -from google.cloud.monitoring_v3.types import metric_service -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete 
these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert MetricServiceClient._get_default_mtls_endpoint(None) is None - assert MetricServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MetricServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MetricServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MetricServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert MetricServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - MetricServiceClient, - MetricServiceAsyncClient, -]) -def test_metric_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MetricServiceGrpcTransport, "grpc"), - (transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metric_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - MetricServiceClient, - MetricServiceAsyncClient, -]) -def test_metric_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = 
client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_metric_service_client_get_transport_class(): - transport = MetricServiceClient.get_transport_class() - available_transports = [ - transports.MetricServiceGrpcTransport, - ] - assert transport in available_transports - - transport = MetricServiceClient.get_transport_class("grpc") - assert transport == transports.MetricServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc"), - (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(MetricServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricServiceClient)) -@mock.patch.object(MetricServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricServiceAsyncClient)) -def test_metric_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(MetricServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MetricServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc", "true"), - (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc", "false"), - (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(MetricServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricServiceClient)) -@mock.patch.object(MetricServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_metric_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc"), - (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metric_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricServiceClient, transports.MetricServiceGrpcTransport, "grpc"), - (MetricServiceAsyncClient, transports.MetricServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metric_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_metric_service_client_client_options_from_dict(): - with mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MetricServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_list_monitored_resource_descriptors(transport: str = 'grpc', request_type=metric_service.ListMonitoredResourceDescriptorsRequest): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_monitored_resource_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.ListMonitoredResourceDescriptorsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_monitored_resource_descriptors_from_dict(): - test_list_monitored_resource_descriptors(request_type=dict) - - -def test_list_monitored_resource_descriptors_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - client.list_monitored_resource_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.ListMonitoredResourceDescriptorsRequest() - - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=metric_service.ListMonitoredResourceDescriptorsRequest): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_monitored_resource_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.ListMonitoredResourceDescriptorsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async_from_dict(): - await test_list_monitored_resource_descriptors_async(request_type=dict) - - -def test_list_monitored_resource_descriptors_field_headers(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.ListMonitoredResourceDescriptorsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - call.return_value = metric_service.ListMonitoredResourceDescriptorsResponse() - client.list_monitored_resource_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_field_headers_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.ListMonitoredResourceDescriptorsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMonitoredResourceDescriptorsResponse()) - await client.list_monitored_resource_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_list_monitored_resource_descriptors_flattened(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.ListMonitoredResourceDescriptorsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_monitored_resource_descriptors( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_list_monitored_resource_descriptors_flattened_error(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_monitored_resource_descriptors( - metric_service.ListMonitoredResourceDescriptorsRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_flattened_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.ListMonitoredResourceDescriptorsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMonitoredResourceDescriptorsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_monitored_resource_descriptors( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_flattened_error_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_monitored_resource_descriptors( - metric_service.ListMonitoredResourceDescriptorsRequest(), - name='name_value', - ) - - -def test_list_monitored_resource_descriptors_pager(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.list_monitored_resource_descriptors(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in results) - -def test_list_monitored_resource_descriptors_pages(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - RuntimeError, - ) - pages = list(client.list_monitored_resource_descriptors(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async_pager(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_monitored_resource_descriptors(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in responses) - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async_pages(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - metric_service.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_monitored_resource_descriptors(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_monitored_resource_descriptor(transport: str = 'grpc', request_type=metric_service.GetMonitoredResourceDescriptorRequest): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_monitored_resource_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = monitored_resource_pb2.MonitoredResourceDescriptor( - name='name_value', - type_='type__value', - display_name='display_name_value', - description='description_value', - launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, - ) - response = client.get_monitored_resource_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.GetMonitoredResourceDescriptorRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, monitored_resource_pb2.MonitoredResourceDescriptor) - assert response.name == 'name_value' - assert response.type_ == 'type__value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED - - -def test_get_monitored_resource_descriptor_from_dict(): - test_get_monitored_resource_descriptor(request_type=dict) - - -def test_get_monitored_resource_descriptor_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_monitored_resource_descriptor), - '__call__') as call: - client.get_monitored_resource_descriptor() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.GetMonitoredResourceDescriptorRequest() - - -@pytest.mark.asyncio -async def test_get_monitored_resource_descriptor_async(transport: str = 'grpc_asyncio', request_type=metric_service.GetMonitoredResourceDescriptorRequest): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_monitored_resource_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(monitored_resource_pb2.MonitoredResourceDescriptor( - name='name_value', - type_='type__value', - display_name='display_name_value', - description='description_value', - launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, - )) - response = await client.get_monitored_resource_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.GetMonitoredResourceDescriptorRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, monitored_resource_pb2.MonitoredResourceDescriptor) - assert response.name == 'name_value' - assert response.type_ == 'type__value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED - - -@pytest.mark.asyncio -async def test_get_monitored_resource_descriptor_async_from_dict(): - await test_get_monitored_resource_descriptor_async(request_type=dict) - - -def test_get_monitored_resource_descriptor_field_headers(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.GetMonitoredResourceDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_monitored_resource_descriptor), - '__call__') as call: - call.return_value = monitored_resource_pb2.MonitoredResourceDescriptor() - client.get_monitored_resource_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_monitored_resource_descriptor_field_headers_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = metric_service.GetMonitoredResourceDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_monitored_resource_descriptor), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(monitored_resource_pb2.MonitoredResourceDescriptor()) - await client.get_monitored_resource_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_monitored_resource_descriptor_flattened(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_monitored_resource_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = monitored_resource_pb2.MonitoredResourceDescriptor() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_monitored_resource_descriptor( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_monitored_resource_descriptor_flattened_error(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_monitored_resource_descriptor( - metric_service.GetMonitoredResourceDescriptorRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_monitored_resource_descriptor_flattened_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_monitored_resource_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = monitored_resource_pb2.MonitoredResourceDescriptor() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(monitored_resource_pb2.MonitoredResourceDescriptor()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_monitored_resource_descriptor( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_monitored_resource_descriptor_flattened_error_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_monitored_resource_descriptor( - metric_service.GetMonitoredResourceDescriptorRequest(), - name='name_value', - ) - - -def test_list_metric_descriptors(transport: str = 'grpc', request_type=metric_service.ListMetricDescriptorsRequest): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.ListMetricDescriptorsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_metric_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.ListMetricDescriptorsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMetricDescriptorsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_metric_descriptors_from_dict(): - test_list_metric_descriptors(request_type=dict) - - -def test_list_metric_descriptors_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__') as call: - client.list_metric_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.ListMetricDescriptorsRequest() - - -@pytest.mark.asyncio -async def test_list_metric_descriptors_async(transport: str = 'grpc_asyncio', request_type=metric_service.ListMetricDescriptorsRequest): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMetricDescriptorsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_metric_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.ListMetricDescriptorsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListMetricDescriptorsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_metric_descriptors_async_from_dict(): - await test_list_metric_descriptors_async(request_type=dict) - - -def test_list_metric_descriptors_field_headers(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.ListMetricDescriptorsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__') as call: - call.return_value = metric_service.ListMetricDescriptorsResponse() - client.list_metric_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_metric_descriptors_field_headers_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.ListMetricDescriptorsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMetricDescriptorsResponse()) - await client.list_metric_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_list_metric_descriptors_flattened(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.ListMetricDescriptorsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_metric_descriptors( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_list_metric_descriptors_flattened_error(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_metric_descriptors( - metric_service.ListMetricDescriptorsRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_list_metric_descriptors_flattened_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.ListMetricDescriptorsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListMetricDescriptorsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_metric_descriptors( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_list_metric_descriptors_flattened_error_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_metric_descriptors( - metric_service.ListMetricDescriptorsRequest(), - name='name_value', - ) - - -def test_list_metric_descriptors_pager(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - ], - next_page_token='abc', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[], - next_page_token='def', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - ], - next_page_token='ghi', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.list_metric_descriptors(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, metric_pb2.MetricDescriptor) - for i in results) - -def test_list_metric_descriptors_pages(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - ], - next_page_token='abc', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[], - next_page_token='def', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - ], - next_page_token='ghi', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - ], - ), - RuntimeError, - ) - pages = list(client.list_metric_descriptors(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_metric_descriptors_async_pager(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - ], - next_page_token='abc', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[], - next_page_token='def', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - ], - next_page_token='ghi', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_metric_descriptors(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metric_pb2.MetricDescriptor) - for i in responses) - -@pytest.mark.asyncio -async def test_list_metric_descriptors_async_pages(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metric_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - ], - next_page_token='abc', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[], - next_page_token='def', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - ], - next_page_token='ghi', - ), - metric_service.ListMetricDescriptorsResponse( - metric_descriptors=[ - metric_pb2.MetricDescriptor(), - metric_pb2.MetricDescriptor(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_metric_descriptors(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_metric_descriptor(transport: str = 'grpc', request_type=metric_service.GetMetricDescriptorRequest): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_pb2.MetricDescriptor( - name='name_value', - type_='type__value', - metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE, - value_type=metric_pb2.MetricDescriptor.ValueType.BOOL, - unit='unit_value', - description='description_value', - display_name='display_name_value', - launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, - monitored_resource_types=['monitored_resource_types_value'], - ) - response = client.get_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.GetMetricDescriptorRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, metric_pb2.MetricDescriptor) - assert response.name == 'name_value' - assert response.type_ == 'type__value' - assert response.metric_kind == metric_pb2.MetricDescriptor.MetricKind.GAUGE - assert response.value_type == metric_pb2.MetricDescriptor.ValueType.BOOL - assert response.unit == 'unit_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED - assert response.monitored_resource_types == ['monitored_resource_types_value'] - - -def test_get_metric_descriptor_from_dict(): - test_get_metric_descriptor(request_type=dict) - - -def test_get_metric_descriptor_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_metric_descriptor), - '__call__') as call: - client.get_metric_descriptor() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.GetMetricDescriptorRequest() - - -@pytest.mark.asyncio -async def test_get_metric_descriptor_async(transport: str = 'grpc_asyncio', request_type=metric_service.GetMetricDescriptorRequest): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor( - name='name_value', - type_='type__value', - metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE, - value_type=metric_pb2.MetricDescriptor.ValueType.BOOL, - unit='unit_value', - description='description_value', - display_name='display_name_value', - launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, - monitored_resource_types=['monitored_resource_types_value'], - )) - response = await client.get_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.GetMetricDescriptorRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, metric_pb2.MetricDescriptor) - assert response.name == 'name_value' - assert response.type_ == 'type__value' - assert response.metric_kind == metric_pb2.MetricDescriptor.MetricKind.GAUGE - assert response.value_type == metric_pb2.MetricDescriptor.ValueType.BOOL - assert response.unit == 'unit_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED - assert response.monitored_resource_types == ['monitored_resource_types_value'] - - -@pytest.mark.asyncio -async def test_get_metric_descriptor_async_from_dict(): - await test_get_metric_descriptor_async(request_type=dict) - - -def test_get_metric_descriptor_field_headers(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.GetMetricDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_metric_descriptor), - '__call__') as call: - call.return_value = metric_pb2.MetricDescriptor() - client.get_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_metric_descriptor_field_headers_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.GetMetricDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_metric_descriptor), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor()) - await client.get_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_metric_descriptor_flattened(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_pb2.MetricDescriptor() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_metric_descriptor( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_metric_descriptor_flattened_error(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_metric_descriptor( - metric_service.GetMetricDescriptorRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_metric_descriptor_flattened_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_pb2.MetricDescriptor() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_metric_descriptor( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_metric_descriptor_flattened_error_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_metric_descriptor( - metric_service.GetMetricDescriptorRequest(), - name='name_value', - ) - - -def test_create_metric_descriptor(transport: str = 'grpc', request_type=metric_service.CreateMetricDescriptorRequest): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_pb2.MetricDescriptor( - name='name_value', - type_='type__value', - metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE, - value_type=metric_pb2.MetricDescriptor.ValueType.BOOL, - unit='unit_value', - description='description_value', - display_name='display_name_value', - launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, - monitored_resource_types=['monitored_resource_types_value'], - ) - response = client.create_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.CreateMetricDescriptorRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, metric_pb2.MetricDescriptor) - assert response.name == 'name_value' - assert response.type_ == 'type__value' - assert response.metric_kind == metric_pb2.MetricDescriptor.MetricKind.GAUGE - assert response.value_type == metric_pb2.MetricDescriptor.ValueType.BOOL - assert response.unit == 'unit_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED - assert response.monitored_resource_types == ['monitored_resource_types_value'] - - -def test_create_metric_descriptor_from_dict(): - test_create_metric_descriptor(request_type=dict) - - -def test_create_metric_descriptor_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metric_descriptor), - '__call__') as call: - client.create_metric_descriptor() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.CreateMetricDescriptorRequest() - - -@pytest.mark.asyncio -async def test_create_metric_descriptor_async(transport: str = 'grpc_asyncio', request_type=metric_service.CreateMetricDescriptorRequest): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor( - name='name_value', - type_='type__value', - metric_kind=metric_pb2.MetricDescriptor.MetricKind.GAUGE, - value_type=metric_pb2.MetricDescriptor.ValueType.BOOL, - unit='unit_value', - description='description_value', - display_name='display_name_value', - launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, - monitored_resource_types=['monitored_resource_types_value'], - )) - response = await client.create_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.CreateMetricDescriptorRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, metric_pb2.MetricDescriptor) - assert response.name == 'name_value' - assert response.type_ == 'type__value' - assert response.metric_kind == metric_pb2.MetricDescriptor.MetricKind.GAUGE - assert response.value_type == metric_pb2.MetricDescriptor.ValueType.BOOL - assert response.unit == 'unit_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED - assert response.monitored_resource_types == ['monitored_resource_types_value'] - - -@pytest.mark.asyncio -async def test_create_metric_descriptor_async_from_dict(): - await test_create_metric_descriptor_async(request_type=dict) - - -def test_create_metric_descriptor_field_headers(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.CreateMetricDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metric_descriptor), - '__call__') as call: - call.return_value = metric_pb2.MetricDescriptor() - client.create_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_metric_descriptor_field_headers_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.CreateMetricDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metric_descriptor), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor()) - await client.create_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_create_metric_descriptor_flattened(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_pb2.MetricDescriptor() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_metric_descriptor( - name='name_value', - metric_descriptor=metric_pb2.MetricDescriptor(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].metric_descriptor == metric_pb2.MetricDescriptor(name='name_value') - - -def test_create_metric_descriptor_flattened_error(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_metric_descriptor( - metric_service.CreateMetricDescriptorRequest(), - name='name_value', - metric_descriptor=metric_pb2.MetricDescriptor(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_metric_descriptor_flattened_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_pb2.MetricDescriptor() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_pb2.MetricDescriptor()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_metric_descriptor( - name='name_value', - metric_descriptor=metric_pb2.MetricDescriptor(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].metric_descriptor == metric_pb2.MetricDescriptor(name='name_value') - - -@pytest.mark.asyncio -async def test_create_metric_descriptor_flattened_error_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_metric_descriptor( - metric_service.CreateMetricDescriptorRequest(), - name='name_value', - metric_descriptor=metric_pb2.MetricDescriptor(name='name_value'), - ) - - -def test_delete_metric_descriptor(transport: str = 'grpc', request_type=metric_service.DeleteMetricDescriptorRequest): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.DeleteMetricDescriptorRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_metric_descriptor_from_dict(): - test_delete_metric_descriptor(request_type=dict) - - -def test_delete_metric_descriptor_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_metric_descriptor), - '__call__') as call: - client.delete_metric_descriptor() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.DeleteMetricDescriptorRequest() - - -@pytest.mark.asyncio -async def test_delete_metric_descriptor_async(transport: str = 'grpc_asyncio', request_type=metric_service.DeleteMetricDescriptorRequest): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.DeleteMetricDescriptorRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_metric_descriptor_async_from_dict(): - await test_delete_metric_descriptor_async(request_type=dict) - - -def test_delete_metric_descriptor_field_headers(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.DeleteMetricDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_metric_descriptor), - '__call__') as call: - call.return_value = None - client.delete_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_metric_descriptor_field_headers_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.DeleteMetricDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_metric_descriptor), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_metric_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_metric_descriptor_flattened(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_metric_descriptor( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_metric_descriptor_flattened_error(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_metric_descriptor( - metric_service.DeleteMetricDescriptorRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_metric_descriptor_flattened_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_metric_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_metric_descriptor( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_metric_descriptor_flattened_error_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_metric_descriptor( - metric_service.DeleteMetricDescriptorRequest(), - name='name_value', - ) - - -def test_list_time_series(transport: str = 'grpc', request_type=metric_service.ListTimeSeriesRequest): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.ListTimeSeriesResponse( - next_page_token='next_page_token_value', - unit='unit_value', - ) - response = client.list_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.ListTimeSeriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTimeSeriesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unit == 'unit_value' - - -def test_list_time_series_from_dict(): - test_list_time_series(request_type=dict) - - -def test_list_time_series_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__') as call: - client.list_time_series() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.ListTimeSeriesRequest() - - -@pytest.mark.asyncio -async def test_list_time_series_async(transport: str = 'grpc_asyncio', request_type=metric_service.ListTimeSeriesRequest): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListTimeSeriesResponse( - next_page_token='next_page_token_value', - unit='unit_value', - )) - response = await client.list_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.ListTimeSeriesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTimeSeriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unit == 'unit_value' - - -@pytest.mark.asyncio -async def test_list_time_series_async_from_dict(): - await test_list_time_series_async(request_type=dict) - - -def test_list_time_series_field_headers(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.ListTimeSeriesRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__') as call: - call.return_value = metric_service.ListTimeSeriesResponse() - client.list_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_time_series_field_headers_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.ListTimeSeriesRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListTimeSeriesResponse()) - await client.list_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_list_time_series_flattened(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.ListTimeSeriesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_time_series( - name='name_value', - filter='filter_value', - interval=common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)), - view=metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].filter == 'filter_value' - assert args[0].interval == common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)) - assert args[0].view == metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS - - -def test_list_time_series_flattened_error(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_time_series( - metric_service.ListTimeSeriesRequest(), - name='name_value', - filter='filter_value', - interval=common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)), - view=metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS, - ) - - -@pytest.mark.asyncio -async def test_list_time_series_flattened_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.ListTimeSeriesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.ListTimeSeriesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_time_series( - name='name_value', - filter='filter_value', - interval=common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)), - view=metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].filter == 'filter_value' - assert args[0].interval == common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)) - assert args[0].view == metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS - - -@pytest.mark.asyncio -async def test_list_time_series_flattened_error_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_time_series( - metric_service.ListTimeSeriesRequest(), - name='name_value', - filter='filter_value', - interval=common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751)), - view=metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS, - ) - - -def test_list_time_series_pager(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__') as call: - # Set the response to a series of pages. 
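A compact sketch of the flattened list_time_series call shape used above, with the interval and view arguments written out; the project name and filter string are placeholders, and google-cloud-monitoring is assumed to be installed.

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.monitoring_v3 import MetricServiceClient
from google.cloud.monitoring_v3.types import common, metric_service
from google.protobuf import timestamp_pb2


def demo_list_time_series_flattened():
    client = MetricServiceClient(credentials=ga_credentials.AnonymousCredentials())
    interval = common.TimeInterval(end_time=timestamp_pb2.Timestamp(seconds=751))
    with mock.patch.object(
            type(client.transport.list_time_series), '__call__') as call:
        call.return_value = metric_service.ListTimeSeriesResponse()
        client.list_time_series(
            name='projects/my-project',
            filter='metric.type = "compute.googleapis.com/instance/cpu/utilization"',
            interval=interval,
            view=metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS,
        )
        _, args, _ = call.mock_calls[0]
        # Each keyword lands on the corresponding request field.
        assert args[0].name == 'projects/my-project'
        assert args[0].view == metric_service.ListTimeSeriesRequest.TimeSeriesView.HEADERS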
- call.side_effect = ( - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - ], - next_page_token='abc', - ), - metric_service.ListTimeSeriesResponse( - time_series=[], - next_page_token='def', - ), - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - ], - next_page_token='ghi', - ), - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.list_time_series(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, gm_metric.TimeSeries) - for i in results) - -def test_list_time_series_pages(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - ], - next_page_token='abc', - ), - metric_service.ListTimeSeriesResponse( - time_series=[], - next_page_token='def', - ), - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - ], - next_page_token='ghi', - ), - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - ], - ), - RuntimeError, - ) - pages = list(client.list_time_series(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_time_series_async_pager(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - ], - next_page_token='abc', - ), - metric_service.ListTimeSeriesResponse( - time_series=[], - next_page_token='def', - ), - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - ], - next_page_token='ghi', - ), - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_time_series(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, gm_metric.TimeSeries) - for i in responses) - -@pytest.mark.asyncio -async def test_list_time_series_async_pages(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_time_series), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
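The pager behaviour these tests simulate with call.side_effect boils down to the following sketch: two fake pages, fetched lazily as the pager is iterated (google-cloud-monitoring assumed installed).

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.monitoring_v3 import MetricServiceClient
from google.cloud.monitoring_v3.types import metric as gm_metric
from google.cloud.monitoring_v3.types import metric_service


def demo_list_time_series_paging():
    client = MetricServiceClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(
            type(client.transport.list_time_series), '__call__') as call:
        call.side_effect = (
            metric_service.ListTimeSeriesResponse(
                time_series=[gm_metric.TimeSeries(), gm_metric.TimeSeries()],
                next_page_token='abc',
            ),
            metric_service.ListTimeSeriesResponse(
                time_series=[gm_metric.TimeSeries()],
            ),
        )
        results = list(client.list_time_series(request={}))
        # The pager flattens both pages into one iterable and stops when
        # next_page_token is empty.
        assert len(results) == 3
        assert call.call_count == 2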
- call.side_effect = ( - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - ], - next_page_token='abc', - ), - metric_service.ListTimeSeriesResponse( - time_series=[], - next_page_token='def', - ), - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - ], - next_page_token='ghi', - ), - metric_service.ListTimeSeriesResponse( - time_series=[ - gm_metric.TimeSeries(), - gm_metric.TimeSeries(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_time_series(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_create_time_series(transport: str = 'grpc', request_type=metric_service.CreateTimeSeriesRequest): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.create_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.CreateTimeSeriesRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_create_time_series_from_dict(): - test_create_time_series(request_type=dict) - - -def test_create_time_series_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_time_series), - '__call__') as call: - client.create_time_series() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.CreateTimeSeriesRequest() - - -@pytest.mark.asyncio -async def test_create_time_series_async(transport: str = 'grpc_asyncio', request_type=metric_service.CreateTimeSeriesRequest): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.create_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.CreateTimeSeriesRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_create_time_series_async_from_dict(): - await test_create_time_series_async(request_type=dict) - - -def test_create_time_series_field_headers(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.CreateTimeSeriesRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_time_series), - '__call__') as call: - call.return_value = None - client.create_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_time_series_field_headers_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.CreateTimeSeriesRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_time_series), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.create_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_create_time_series_flattened(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_time_series( - name='name_value', - time_series=[gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].time_series == [gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))] - - -def test_create_time_series_flattened_error(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
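The routing-header assertion pattern repeated in these tests reduces to one sketch. It assumes google-cloud-monitoring is installed and only checks that a name= entry reaches x-goog-request-params, since the exact encoding of the value may differ across google-api-core versions.

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.monitoring_v3 import MetricServiceClient
from google.cloud.monitoring_v3.types import metric_service


def demo_routing_header():
    client = MetricServiceClient(credentials=ga_credentials.AnonymousCredentials())
    request = metric_service.CreateTimeSeriesRequest(name='projects/my-project')
    with mock.patch.object(
            type(client.transport.create_time_series), '__call__') as call:
        call.return_value = None
        client.create_time_series(request)
        _, _, kwargs = call.mock_calls[0]
        # request.name is copied into the x-goog-request-params metadata entry.
        params = dict(kwargs['metadata'])['x-goog-request-params']
        assert params.startswith('name=')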
- with pytest.raises(ValueError): - client.create_time_series( - metric_service.CreateTimeSeriesRequest(), - name='name_value', - time_series=[gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))], - ) - - -@pytest.mark.asyncio -async def test_create_time_series_flattened_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_time_series( - name='name_value', - time_series=[gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].time_series == [gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))] - - -@pytest.mark.asyncio -async def test_create_time_series_flattened_error_async(): - client = MetricServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_time_series( - metric_service.CreateTimeSeriesRequest(), - name='name_value', - time_series=[gm_metric.TimeSeries(metric=metric_pb2.Metric(type_='type__value'))], - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MetricServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.MetricServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.MetricServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.MetricServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MetricServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
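A standalone sketch of the transport/credential exclusivity rules tested above, assuming google-cloud-monitoring and pytest are installed.

import pytest
from google.auth import credentials as ga_credentials
from google.cloud.monitoring_v3 import MetricServiceClient
from google.cloud.monitoring_v3.services.metric_service import transports


def demo_transport_rules():
    transport = transports.MetricServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # A prebuilt transport already carries credentials, so passing more is an error.
    with pytest.raises(ValueError):
        MetricServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )
    # On its own, a transport instance is accepted and reused as-is.
    client = MetricServiceClient(transport=transport)
    assert client.transport is transport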
-    transport = transports.MetricServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-    transport = transports.MetricServiceGrpcAsyncIOTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-@pytest.mark.parametrize("transport_class", [
-    transports.MetricServiceGrpcTransport,
-    transports.MetricServiceGrpcAsyncIOTransport,
-])
-def test_transport_adc(transport_class):
-    # Test default credentials are used if not provided.
-    with mock.patch.object(google.auth, 'default') as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class()
-        adc.assert_called_once()
-
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
-    client = MetricServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.MetricServiceGrpcTransport,
-    )
-
-def test_metric_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.MetricServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
-        )
-
-
-def test_metric_service_base_transport():
-    # Instantiate the base transport.
-    with mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceTransport.__init__') as Transport:
-        Transport.return_value = None
-        transport = transports.MetricServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-        )
-
-    # Every method on the transport should just blindly
-    # raise NotImplementedError.
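The ADC fallback exercised by test_transport_adc can be reproduced in isolation; this sketch assumes google-cloud-monitoring is installed and patches google.auth.default so no real credentials are needed.

from unittest import mock

import google.auth
from google.auth import credentials as ga_credentials
from google.cloud.monitoring_v3.services.metric_service import transports


def demo_transport_adc():
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        # With no explicit credentials, the transport asks ADC for them.
        transports.MetricServiceGrpcTransport()
        adc.assert_called_once()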
- methods = ( - 'list_monitored_resource_descriptors', - 'get_monitored_resource_descriptor', - 'list_metric_descriptors', - 'get_metric_descriptor', - 'create_metric_descriptor', - 'delete_metric_descriptor', - 'list_time_series', - 'create_time_series', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_metric_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - 'https://www.googleapis.com/auth/monitoring.write', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_metric_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - 'https://www.googleapis.com/auth/monitoring.write', - ), - quota_project_id="octopus", - ) - - -def test_metric_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.metric_service.transports.MetricServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricServiceTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_metric_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
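As a sketch of the abstract-base contract listed above: every RPC attribute on MetricServiceTransport raises NotImplementedError until a concrete gRPC transport overrides it. Only two of the eight methods are checked here, and _prep_wrapped_messages is stubbed so the base class can be instantiated directly (assumes google-cloud-monitoring and pytest).

from unittest import mock

import pytest
from google.auth import credentials as ga_credentials
from google.cloud.monitoring_v3.services.metric_service import transports


def demo_base_transport_is_abstract():
    with mock.patch.object(transports.MetricServiceTransport, '_prep_wrapped_messages'):
        transport = transports.MetricServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    for method in ('list_time_series', 'create_time_series'):
        # Accessing the RPC attribute on the base transport raises immediately.
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())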
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetricServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - 'https://www.googleapis.com/auth/monitoring.write', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_metric_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetricServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read', 'https://www.googleapis.com/auth/monitoring.write',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetricServiceGrpcTransport, - transports.MetricServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_metric_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read', 'https://www.googleapis.com/auth/monitoring.write',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetricServiceGrpcTransport, - transports.MetricServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_metric_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - 'https://www.googleapis.com/auth/monitoring.write', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MetricServiceGrpcTransport, grpc_helpers), - (transports.MetricServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_metric_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "monitoring.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - 'https://www.googleapis.com/auth/monitoring.write', -), - scopes=["1", "2"], - default_host="monitoring.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MetricServiceGrpcTransport, transports.MetricServiceGrpcAsyncIOTransport]) -def test_metric_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_metric_service_host_no_port(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), - ) - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_metric_service_host_with_port(): - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), - ) - assert client.transport._host == 'monitoring.googleapis.com:8000' - -def test_metric_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.MetricServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_metric_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
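The endpoint-override behaviour asserted just above reduces to this sketch (google-cloud-monitoring assumed installed): client_options.api_endpoint flows into the transport host, and :443 is appended when no port is given.

from google.api_core import client_options
from google.auth import credentials as ga_credentials
from google.cloud.monitoring_v3 import MetricServiceClient


def demo_api_endpoint_override():
    no_port = MetricServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'),
    )
    assert no_port.transport._host == 'monitoring.googleapis.com:443'

    with_port = MetricServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'),
    )
    assert with_port.transport._host == 'monitoring.googleapis.com:8000'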
- transport = transports.MetricServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.MetricServiceGrpcTransport, transports.MetricServiceGrpcAsyncIOTransport]) -def test_metric_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.MetricServiceGrpcTransport, transports.MetricServiceGrpcAsyncIOTransport]) -def test_metric_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_metric_descriptor_path(): - project = "squid" - expected = "projects/{project}/metricDescriptors/{metric_descriptor=**}".format(project=project, ) - actual = MetricServiceClient.metric_descriptor_path(project) - assert expected == actual - - -def test_parse_metric_descriptor_path(): - expected = { - "project": "clam", - } - path = MetricServiceClient.metric_descriptor_path(**expected) - - # Check that the path construction is reversible. - actual = MetricServiceClient.parse_metric_descriptor_path(path) - assert expected == actual - -def test_monitored_resource_descriptor_path(): - project = "whelk" - monitored_resource_descriptor = "octopus" - expected = "projects/{project}/monitoredResourceDescriptors/{monitored_resource_descriptor}".format(project=project, monitored_resource_descriptor=monitored_resource_descriptor, ) - actual = MetricServiceClient.monitored_resource_descriptor_path(project, monitored_resource_descriptor) - assert expected == actual - - -def test_parse_monitored_resource_descriptor_path(): - expected = { - "project": "oyster", - "monitored_resource_descriptor": "nudibranch", - } - path = MetricServiceClient.monitored_resource_descriptor_path(**expected) - - # Check that the path construction is reversible. - actual = MetricServiceClient.parse_monitored_resource_descriptor_path(path) - assert expected == actual - -def test_time_series_path(): - project = "cuttlefish" - time_series = "mussel" - expected = "projects/{project}/timeSeries/{time_series}".format(project=project, time_series=time_series, ) - actual = MetricServiceClient.time_series_path(project, time_series) - assert expected == actual - - -def test_parse_time_series_path(): - expected = { - "project": "winkle", - "time_series": "nautilus", - } - path = MetricServiceClient.time_series_path(**expected) - - # Check that the path construction is reversible. 
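The resource-path helpers exercised above pair each builder with a parse_* inverse; a small sketch with illustrative identifiers, assuming google-cloud-monitoring is installed.

from google.cloud.monitoring_v3 import MetricServiceClient


def demo_path_helpers():
    path = MetricServiceClient.monitored_resource_descriptor_path(
        'my-project', 'gce_instance')
    assert path == 'projects/my-project/monitoredResourceDescriptors/gce_instance'
    # parse_* recovers the components that the builder formatted in.
    parsed = MetricServiceClient.parse_monitored_resource_descriptor_path(path)
    assert parsed == {'project': 'my-project',
                      'monitored_resource_descriptor': 'gce_instance'}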
- actual = MetricServiceClient.parse_time_series_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MetricServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = MetricServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = MetricServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = MetricServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = MetricServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = MetricServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MetricServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = MetricServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = MetricServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = MetricServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = MetricServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = MetricServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MetricServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = MetricServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = MetricServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.MetricServiceTransport, '_prep_wrapped_messages') as prep: - client = MetricServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.MetricServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = MetricServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py deleted file mode 100644 index f9cedb52..00000000 --- a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py +++ /dev/null @@ -1,3757 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api import label_pb2 # type: ignore -from google.api import launch_stage_pb2 # type: ignore -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.monitoring_v3.services.notification_channel_service import NotificationChannelServiceAsyncClient -from google.cloud.monitoring_v3.services.notification_channel_service import NotificationChannelServiceClient -from google.cloud.monitoring_v3.services.notification_channel_service import pagers -from google.cloud.monitoring_v3.services.notification_channel_service import transports -from google.cloud.monitoring_v3.services.notification_channel_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.monitoring_v3.types import mutation_record -from google.cloud.monitoring_v3.types import notification -from google.cloud.monitoring_v3.types import notification_service -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests 
the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert NotificationChannelServiceClient._get_default_mtls_endpoint(None) is None - assert NotificationChannelServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert NotificationChannelServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert NotificationChannelServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert NotificationChannelServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert NotificationChannelServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - NotificationChannelServiceClient, - NotificationChannelServiceAsyncClient, -]) -def test_notification_channel_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.NotificationChannelServiceGrpcTransport, "grpc"), - (transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_notification_channel_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - NotificationChannelServiceClient, - NotificationChannelServiceAsyncClient, -]) -def test_notification_channel_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with 
mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_notification_channel_service_client_get_transport_class(): - transport = NotificationChannelServiceClient.get_transport_class() - available_transports = [ - transports.NotificationChannelServiceGrpcTransport, - ] - assert transport in available_transports - - transport = NotificationChannelServiceClient.get_transport_class("grpc") - assert transport == transports.NotificationChannelServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc"), - (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(NotificationChannelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NotificationChannelServiceClient)) -@mock.patch.object(NotificationChannelServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NotificationChannelServiceAsyncClient)) -def test_notification_channel_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(NotificationChannelServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(NotificationChannelServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
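A sketch of the from_service_account_file shortcut shown above, with the oauth2 credential factory mocked so no key file is read; google-cloud-monitoring is assumed installed and the path is a dummy.

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.monitoring_v3 import NotificationChannelServiceClient
from google.oauth2 import service_account


def demo_from_service_account_file():
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
            service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = NotificationChannelServiceClient.from_service_account_file(
            'dummy/file/path.json')
    # The mocked credentials are handed straight to the transport.
    assert client.transport._credentials == creds
    assert client.transport._host == 'monitoring.googleapis.com:443'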
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc", "true"), - (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc", "false"), - (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(NotificationChannelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NotificationChannelServiceClient)) -@mock.patch.object(NotificationChannelServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NotificationChannelServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_notification_channel_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc"), - (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_notification_channel_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
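The GOOGLE_API_USE_MTLS_ENDPOINT handling tested above can be summarised in a sketch; it assumes google-cloud-monitoring and pytest are installed and that no conflicting mTLS environment variables are set in the surrounding shell.

import os
from unittest import mock

import pytest
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.monitoring_v3 import NotificationChannelServiceClient


def demo_mtls_endpoint_env():
    creds = ga_credentials.AnonymousCredentials()
    # "always" switches the client to the mTLS endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        client = NotificationChannelServiceClient(credentials=creds)
        assert client.transport._host == (
            NotificationChannelServiceClient.DEFAULT_MTLS_ENDPOINT + ':443')
    # Any value other than "never", "auto", or "always" is rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            NotificationChannelServiceClient(credentials=creds)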
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (NotificationChannelServiceClient, transports.NotificationChannelServiceGrpcTransport, "grpc"), - (NotificationChannelServiceAsyncClient, transports.NotificationChannelServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_notification_channel_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_notification_channel_service_client_client_options_from_dict(): - with mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = NotificationChannelServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_list_notification_channel_descriptors(transport: str = 'grpc', request_type=notification_service.ListNotificationChannelDescriptorsRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification_service.ListNotificationChannelDescriptorsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_notification_channel_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.ListNotificationChannelDescriptorsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNotificationChannelDescriptorsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_notification_channel_descriptors_from_dict(): - test_list_notification_channel_descriptors(request_type=dict) - - -def test_list_notification_channel_descriptors_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__') as call: - client.list_notification_channel_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.ListNotificationChannelDescriptorsRequest() - - -@pytest.mark.asyncio -async def test_list_notification_channel_descriptors_async(transport: str = 'grpc_asyncio', request_type=notification_service.ListNotificationChannelDescriptorsRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelDescriptorsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_notification_channel_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.ListNotificationChannelDescriptorsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNotificationChannelDescriptorsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_notification_channel_descriptors_async_from_dict(): - await test_list_notification_channel_descriptors_async(request_type=dict) - - -def test_list_notification_channel_descriptors_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.ListNotificationChannelDescriptorsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__') as call: - call.return_value = notification_service.ListNotificationChannelDescriptorsResponse() - client.list_notification_channel_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_notification_channel_descriptors_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.ListNotificationChannelDescriptorsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelDescriptorsResponse()) - await client.list_notification_channel_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_list_notification_channel_descriptors_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification_service.ListNotificationChannelDescriptorsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_notification_channel_descriptors( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_list_notification_channel_descriptors_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_notification_channel_descriptors( - notification_service.ListNotificationChannelDescriptorsRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_list_notification_channel_descriptors_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification_service.ListNotificationChannelDescriptorsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelDescriptorsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_notification_channel_descriptors( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_list_notification_channel_descriptors_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_notification_channel_descriptors( - notification_service.ListNotificationChannelDescriptorsRequest(), - name='name_value', - ) - - -def test_list_notification_channel_descriptors_pager(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - ], - next_page_token='abc', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[], - next_page_token='def', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - ], - next_page_token='ghi', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.list_notification_channel_descriptors(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, notification.NotificationChannelDescriptor) - for i in results) - -def test_list_notification_channel_descriptors_pages(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - ], - next_page_token='abc', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[], - next_page_token='def', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - ], - next_page_token='ghi', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - ], - ), - RuntimeError, - ) - pages = list(client.list_notification_channel_descriptors(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_notification_channel_descriptors_async_pager(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - ], - next_page_token='abc', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[], - next_page_token='def', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - ], - next_page_token='ghi', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_notification_channel_descriptors(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, notification.NotificationChannelDescriptor) - for i in responses) - -@pytest.mark.asyncio -async def test_list_notification_channel_descriptors_async_pages(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channel_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - ], - next_page_token='abc', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[], - next_page_token='def', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - ], - next_page_token='ghi', - ), - notification_service.ListNotificationChannelDescriptorsResponse( - channel_descriptors=[ - notification.NotificationChannelDescriptor(), - notification.NotificationChannelDescriptor(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_notification_channel_descriptors(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_notification_channel_descriptor(transport: str = 'grpc', request_type=notification_service.GetNotificationChannelDescriptorRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannelDescriptor( - name='name_value', - type_='type__value', - display_name='display_name_value', - description='description_value', - launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, - ) - response = client.get_notification_channel_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.GetNotificationChannelDescriptorRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification.NotificationChannelDescriptor) - assert response.name == 'name_value' - assert response.type_ == 'type__value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED - - -def test_get_notification_channel_descriptor_from_dict(): - test_get_notification_channel_descriptor(request_type=dict) - - -def test_get_notification_channel_descriptor_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_notification_channel_descriptor), - '__call__') as call: - client.get_notification_channel_descriptor() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.GetNotificationChannelDescriptorRequest() - - -@pytest.mark.asyncio -async def test_get_notification_channel_descriptor_async(transport: str = 'grpc_asyncio', request_type=notification_service.GetNotificationChannelDescriptorRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannelDescriptor( - name='name_value', - type_='type__value', - display_name='display_name_value', - description='description_value', - launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, - )) - response = await client.get_notification_channel_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.GetNotificationChannelDescriptorRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification.NotificationChannelDescriptor) - assert response.name == 'name_value' - assert response.type_ == 'type__value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED - - -@pytest.mark.asyncio -async def test_get_notification_channel_descriptor_async_from_dict(): - await test_get_notification_channel_descriptor_async(request_type=dict) - - -def test_get_notification_channel_descriptor_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.GetNotificationChannelDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_descriptor), - '__call__') as call: - call.return_value = notification.NotificationChannelDescriptor() - client.get_notification_channel_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_notification_channel_descriptor_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = notification_service.GetNotificationChannelDescriptorRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_descriptor), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannelDescriptor()) - await client.get_notification_channel_descriptor(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_notification_channel_descriptor_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannelDescriptor() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_notification_channel_descriptor( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_notification_channel_descriptor_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_notification_channel_descriptor( - notification_service.GetNotificationChannelDescriptorRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_notification_channel_descriptor_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_descriptor), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannelDescriptor() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannelDescriptor()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_notification_channel_descriptor( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_notification_channel_descriptor_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_notification_channel_descriptor( - notification_service.GetNotificationChannelDescriptorRequest(), - name='name_value', - ) - - -def test_list_notification_channels(transport: str = 'grpc', request_type=notification_service.ListNotificationChannelsRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification_service.ListNotificationChannelsResponse( - next_page_token='next_page_token_value', - total_size=1086, - ) - response = client.list_notification_channels(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.ListNotificationChannelsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNotificationChannelsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.total_size == 1086 - - -def test_list_notification_channels_from_dict(): - test_list_notification_channels(request_type=dict) - - -def test_list_notification_channels_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__') as call: - client.list_notification_channels() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.ListNotificationChannelsRequest() - - -@pytest.mark.asyncio -async def test_list_notification_channels_async(transport: str = 'grpc_asyncio', request_type=notification_service.ListNotificationChannelsRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelsResponse( - next_page_token='next_page_token_value', - total_size=1086, - )) - response = await client.list_notification_channels(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.ListNotificationChannelsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNotificationChannelsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.total_size == 1086 - - -@pytest.mark.asyncio -async def test_list_notification_channels_async_from_dict(): - await test_list_notification_channels_async(request_type=dict) - - -def test_list_notification_channels_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.ListNotificationChannelsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__') as call: - call.return_value = notification_service.ListNotificationChannelsResponse() - client.list_notification_channels(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_notification_channels_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.ListNotificationChannelsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelsResponse()) - await client.list_notification_channels(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_list_notification_channels_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification_service.ListNotificationChannelsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_notification_channels( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_list_notification_channels_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_notification_channels( - notification_service.ListNotificationChannelsRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_list_notification_channels_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification_service.ListNotificationChannelsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.ListNotificationChannelsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_notification_channels( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_list_notification_channels_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_notification_channels( - notification_service.ListNotificationChannelsRequest(), - name='name_value', - ) - - -def test_list_notification_channels_pager(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - notification.NotificationChannel(), - notification.NotificationChannel(), - ], - next_page_token='abc', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[], - next_page_token='def', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - ], - next_page_token='ghi', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - notification.NotificationChannel(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.list_notification_channels(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, notification.NotificationChannel) - for i in results) - -def test_list_notification_channels_pages(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - notification.NotificationChannel(), - notification.NotificationChannel(), - ], - next_page_token='abc', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[], - next_page_token='def', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - ], - next_page_token='ghi', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - notification.NotificationChannel(), - ], - ), - RuntimeError, - ) - pages = list(client.list_notification_channels(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_notification_channels_async_pager(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - notification.NotificationChannel(), - notification.NotificationChannel(), - ], - next_page_token='abc', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[], - next_page_token='def', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - ], - next_page_token='ghi', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - notification.NotificationChannel(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_notification_channels(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, notification.NotificationChannel) - for i in responses) - -@pytest.mark.asyncio -async def test_list_notification_channels_async_pages(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_notification_channels), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - notification.NotificationChannel(), - notification.NotificationChannel(), - ], - next_page_token='abc', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[], - next_page_token='def', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - ], - next_page_token='ghi', - ), - notification_service.ListNotificationChannelsResponse( - notification_channels=[ - notification.NotificationChannel(), - notification.NotificationChannel(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_notification_channels(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_notification_channel(transport: str = 'grpc', request_type=notification_service.GetNotificationChannelRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel( - type_='type__value', - name='name_value', - display_name='display_name_value', - description='description_value', - verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, - ) - response = client.get_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.GetNotificationChannelRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification.NotificationChannel) - assert response.type_ == 'type__value' - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED - - -def test_get_notification_channel_from_dict(): - test_get_notification_channel(request_type=dict) - - -def test_get_notification_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_notification_channel), - '__call__') as call: - client.get_notification_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.GetNotificationChannelRequest() - - -@pytest.mark.asyncio -async def test_get_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.GetNotificationChannelRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel( - type_='type__value', - name='name_value', - display_name='display_name_value', - description='description_value', - verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, - )) - response = await client.get_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.GetNotificationChannelRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification.NotificationChannel) - assert response.type_ == 'type__value' - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED - - -@pytest.mark.asyncio -async def test_get_notification_channel_async_from_dict(): - await test_get_notification_channel_async(request_type=dict) - - -def test_get_notification_channel_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.GetNotificationChannelRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel), - '__call__') as call: - call.return_value = notification.NotificationChannel() - client.get_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_notification_channel_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.GetNotificationChannelRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_notification_channel), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) - await client.get_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_notification_channel_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_notification_channel( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_notification_channel_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_notification_channel( - notification_service.GetNotificationChannelRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_notification_channel_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_notification_channel( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_notification_channel_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_notification_channel( - notification_service.GetNotificationChannelRequest(), - name='name_value', - ) - - -def test_create_notification_channel(transport: str = 'grpc', request_type=notification_service.CreateNotificationChannelRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel( - type_='type__value', - name='name_value', - display_name='display_name_value', - description='description_value', - verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, - ) - response = client.create_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.CreateNotificationChannelRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification.NotificationChannel) - assert response.type_ == 'type__value' - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED - - -def test_create_notification_channel_from_dict(): - test_create_notification_channel(request_type=dict) - - -def test_create_notification_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_notification_channel), - '__call__') as call: - client.create_notification_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.CreateNotificationChannelRequest() - - -@pytest.mark.asyncio -async def test_create_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.CreateNotificationChannelRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel( - type_='type__value', - name='name_value', - display_name='display_name_value', - description='description_value', - verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, - )) - response = await client.create_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.CreateNotificationChannelRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification.NotificationChannel) - assert response.type_ == 'type__value' - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED - - -@pytest.mark.asyncio -async def test_create_notification_channel_async_from_dict(): - await test_create_notification_channel_async(request_type=dict) - - -def test_create_notification_channel_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.CreateNotificationChannelRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_notification_channel), - '__call__') as call: - call.return_value = notification.NotificationChannel() - client.create_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_notification_channel_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.CreateNotificationChannelRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_notification_channel), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) - await client.create_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_create_notification_channel_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_notification_channel( - name='name_value', - notification_channel=notification.NotificationChannel(type_='type__value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].notification_channel == notification.NotificationChannel(type_='type__value') - - -def test_create_notification_channel_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_notification_channel( - notification_service.CreateNotificationChannelRequest(), - name='name_value', - notification_channel=notification.NotificationChannel(type_='type__value'), - ) - - -@pytest.mark.asyncio -async def test_create_notification_channel_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_notification_channel( - name='name_value', - notification_channel=notification.NotificationChannel(type_='type__value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].notification_channel == notification.NotificationChannel(type_='type__value') - - -@pytest.mark.asyncio -async def test_create_notification_channel_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_notification_channel( - notification_service.CreateNotificationChannelRequest(), - name='name_value', - notification_channel=notification.NotificationChannel(type_='type__value'), - ) - - -def test_update_notification_channel(transport: str = 'grpc', request_type=notification_service.UpdateNotificationChannelRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel( - type_='type__value', - name='name_value', - display_name='display_name_value', - description='description_value', - verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, - ) - response = client.update_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.UpdateNotificationChannelRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification.NotificationChannel) - assert response.type_ == 'type__value' - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED - - -def test_update_notification_channel_from_dict(): - test_update_notification_channel(request_type=dict) - - -def test_update_notification_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_notification_channel), - '__call__') as call: - client.update_notification_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.UpdateNotificationChannelRequest() - - -@pytest.mark.asyncio -async def test_update_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.UpdateNotificationChannelRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel( - type_='type__value', - name='name_value', - display_name='display_name_value', - description='description_value', - verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, - )) - response = await client.update_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.UpdateNotificationChannelRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, notification.NotificationChannel) - assert response.type_ == 'type__value' - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED - - -@pytest.mark.asyncio -async def test_update_notification_channel_async_from_dict(): - await test_update_notification_channel_async(request_type=dict) - - -def test_update_notification_channel_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.UpdateNotificationChannelRequest() - - request.notification_channel.name = 'notification_channel.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_notification_channel), - '__call__') as call: - call.return_value = notification.NotificationChannel() - client.update_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'notification_channel.name=notification_channel.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_notification_channel_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.UpdateNotificationChannelRequest() - - request.notification_channel.name = 'notification_channel.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_notification_channel), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) - await client.update_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'notification_channel.name=notification_channel.name/value', - ) in kw['metadata'] - - -def test_update_notification_channel_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_notification_channel( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - notification_channel=notification.NotificationChannel(type_='type__value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - assert args[0].notification_channel == notification.NotificationChannel(type_='type__value') - - -def test_update_notification_channel_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_notification_channel( - notification_service.UpdateNotificationChannelRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - notification_channel=notification.NotificationChannel(type_='type__value'), - ) - - -@pytest.mark.asyncio -async def test_update_notification_channel_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_notification_channel( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - notification_channel=notification.NotificationChannel(type_='type__value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - assert args[0].notification_channel == notification.NotificationChannel(type_='type__value') - - -@pytest.mark.asyncio -async def test_update_notification_channel_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_notification_channel( - notification_service.UpdateNotificationChannelRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - notification_channel=notification.NotificationChannel(type_='type__value'), - ) - - -def test_delete_notification_channel(transport: str = 'grpc', request_type=notification_service.DeleteNotificationChannelRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.DeleteNotificationChannelRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_notification_channel_from_dict(): - test_delete_notification_channel(request_type=dict) - - -def test_delete_notification_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_notification_channel), - '__call__') as call: - client.delete_notification_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.DeleteNotificationChannelRequest() - - -@pytest.mark.asyncio -async def test_delete_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.DeleteNotificationChannelRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.DeleteNotificationChannelRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_notification_channel_async_from_dict(): - await test_delete_notification_channel_async(request_type=dict) - - -def test_delete_notification_channel_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.DeleteNotificationChannelRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_notification_channel), - '__call__') as call: - call.return_value = None - client.delete_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_notification_channel_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = notification_service.DeleteNotificationChannelRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_notification_channel), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_notification_channel_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_notification_channel( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].force == True - - -def test_delete_notification_channel_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_notification_channel( - notification_service.DeleteNotificationChannelRequest(), - name='name_value', - force=True, - ) - - -@pytest.mark.asyncio -async def test_delete_notification_channel_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_notification_channel( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].force == True - - -@pytest.mark.asyncio -async def test_delete_notification_channel_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_notification_channel( - notification_service.DeleteNotificationChannelRequest(), - name='name_value', - force=True, - ) - - -def test_send_notification_channel_verification_code(transport: str = 'grpc', request_type=notification_service.SendNotificationChannelVerificationCodeRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.send_notification_channel_verification_code), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.send_notification_channel_verification_code(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.SendNotificationChannelVerificationCodeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_send_notification_channel_verification_code_from_dict(): - test_send_notification_channel_verification_code(request_type=dict) - - -def test_send_notification_channel_verification_code_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.send_notification_channel_verification_code), - '__call__') as call: - client.send_notification_channel_verification_code() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.SendNotificationChannelVerificationCodeRequest() - - -@pytest.mark.asyncio -async def test_send_notification_channel_verification_code_async(transport: str = 'grpc_asyncio', request_type=notification_service.SendNotificationChannelVerificationCodeRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.send_notification_channel_verification_code), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.send_notification_channel_verification_code(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.SendNotificationChannelVerificationCodeRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_send_notification_channel_verification_code_async_from_dict(): - await test_send_notification_channel_verification_code_async(request_type=dict) - - -def test_send_notification_channel_verification_code_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.SendNotificationChannelVerificationCodeRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.send_notification_channel_verification_code), - '__call__') as call: - call.return_value = None - client.send_notification_channel_verification_code(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_send_notification_channel_verification_code_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.SendNotificationChannelVerificationCodeRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.send_notification_channel_verification_code), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.send_notification_channel_verification_code(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_send_notification_channel_verification_code_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.send_notification_channel_verification_code), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.send_notification_channel_verification_code( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_send_notification_channel_verification_code_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.send_notification_channel_verification_code( - notification_service.SendNotificationChannelVerificationCodeRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_send_notification_channel_verification_code_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.send_notification_channel_verification_code), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.send_notification_channel_verification_code( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_send_notification_channel_verification_code_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.send_notification_channel_verification_code( - notification_service.SendNotificationChannelVerificationCodeRequest(), - name='name_value', - ) - - -def test_get_notification_channel_verification_code(transport: str = 'grpc', request_type=notification_service.GetNotificationChannelVerificationCodeRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_verification_code), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification_service.GetNotificationChannelVerificationCodeResponse( - code='code_value', - ) - response = client.get_notification_channel_verification_code(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.GetNotificationChannelVerificationCodeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification_service.GetNotificationChannelVerificationCodeResponse) - assert response.code == 'code_value' - - -def test_get_notification_channel_verification_code_from_dict(): - test_get_notification_channel_verification_code(request_type=dict) - - -def test_get_notification_channel_verification_code_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_notification_channel_verification_code), - '__call__') as call: - client.get_notification_channel_verification_code() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.GetNotificationChannelVerificationCodeRequest() - - -@pytest.mark.asyncio -async def test_get_notification_channel_verification_code_async(transport: str = 'grpc_asyncio', request_type=notification_service.GetNotificationChannelVerificationCodeRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_verification_code), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification_service.GetNotificationChannelVerificationCodeResponse( - code='code_value', - )) - response = await client.get_notification_channel_verification_code(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.GetNotificationChannelVerificationCodeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification_service.GetNotificationChannelVerificationCodeResponse) - assert response.code == 'code_value' - - -@pytest.mark.asyncio -async def test_get_notification_channel_verification_code_async_from_dict(): - await test_get_notification_channel_verification_code_async(request_type=dict) - - -def test_get_notification_channel_verification_code_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.GetNotificationChannelVerificationCodeRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_verification_code), - '__call__') as call: - call.return_value = notification_service.GetNotificationChannelVerificationCodeResponse() - client.get_notification_channel_verification_code(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_notification_channel_verification_code_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.GetNotificationChannelVerificationCodeRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_notification_channel_verification_code), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.GetNotificationChannelVerificationCodeResponse()) - await client.get_notification_channel_verification_code(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_notification_channel_verification_code_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_verification_code), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification_service.GetNotificationChannelVerificationCodeResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_notification_channel_verification_code( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_notification_channel_verification_code_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_notification_channel_verification_code( - notification_service.GetNotificationChannelVerificationCodeRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_notification_channel_verification_code_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_notification_channel_verification_code), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification_service.GetNotificationChannelVerificationCodeResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification_service.GetNotificationChannelVerificationCodeResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_notification_channel_verification_code( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_notification_channel_verification_code_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_notification_channel_verification_code( - notification_service.GetNotificationChannelVerificationCodeRequest(), - name='name_value', - ) - - -def test_verify_notification_channel(transport: str = 'grpc', request_type=notification_service.VerifyNotificationChannelRequest): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.verify_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel( - type_='type__value', - name='name_value', - display_name='display_name_value', - description='description_value', - verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, - ) - response = client.verify_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.VerifyNotificationChannelRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification.NotificationChannel) - assert response.type_ == 'type__value' - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED - - -def test_verify_notification_channel_from_dict(): - test_verify_notification_channel(request_type=dict) - - -def test_verify_notification_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.verify_notification_channel), - '__call__') as call: - client.verify_notification_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.VerifyNotificationChannelRequest() - - -@pytest.mark.asyncio -async def test_verify_notification_channel_async(transport: str = 'grpc_asyncio', request_type=notification_service.VerifyNotificationChannelRequest): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.verify_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel( - type_='type__value', - name='name_value', - display_name='display_name_value', - description='description_value', - verification_status=notification.NotificationChannel.VerificationStatus.UNVERIFIED, - )) - response = await client.verify_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == notification_service.VerifyNotificationChannelRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, notification.NotificationChannel) - assert response.type_ == 'type__value' - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.verification_status == notification.NotificationChannel.VerificationStatus.UNVERIFIED - - -@pytest.mark.asyncio -async def test_verify_notification_channel_async_from_dict(): - await test_verify_notification_channel_async(request_type=dict) - - -def test_verify_notification_channel_field_headers(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.VerifyNotificationChannelRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.verify_notification_channel), - '__call__') as call: - call.return_value = notification.NotificationChannel() - client.verify_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_verify_notification_channel_field_headers_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = notification_service.VerifyNotificationChannelRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.verify_notification_channel), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) - await client.verify_notification_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_verify_notification_channel_flattened(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.verify_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.verify_notification_channel( - name='name_value', - code='code_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].code == 'code_value' - - -def test_verify_notification_channel_flattened_error(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.verify_notification_channel( - notification_service.VerifyNotificationChannelRequest(), - name='name_value', - code='code_value', - ) - - -@pytest.mark.asyncio -async def test_verify_notification_channel_flattened_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.verify_notification_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = notification.NotificationChannel() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(notification.NotificationChannel()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.verify_notification_channel( - name='name_value', - code='code_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].code == 'code_value' - - -@pytest.mark.asyncio -async def test_verify_notification_channel_flattened_error_async(): - client = NotificationChannelServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.verify_notification_channel( - notification_service.VerifyNotificationChannelRequest(), - name='name_value', - code='code_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.NotificationChannelServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.NotificationChannelServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = NotificationChannelServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.NotificationChannelServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = NotificationChannelServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.NotificationChannelServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = NotificationChannelServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.NotificationChannelServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.NotificationChannelServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.NotificationChannelServiceGrpcTransport, - transports.NotificationChannelServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.NotificationChannelServiceGrpcTransport, - ) - -def test_notification_channel_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.NotificationChannelServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_notification_channel_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.NotificationChannelServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'list_notification_channel_descriptors', - 'get_notification_channel_descriptor', - 'list_notification_channels', - 'get_notification_channel', - 'create_notification_channel', - 'update_notification_channel', - 'delete_notification_channel', - 'send_notification_channel_verification_code', - 'get_notification_channel_verification_code', - 'verify_notification_channel', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_notification_channel_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NotificationChannelServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_notification_channel_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NotificationChannelServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ), - quota_project_id="octopus", - ) - - -def test_notification_channel_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.notification_channel_service.transports.NotificationChannelServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NotificationChannelServiceTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_notification_channel_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - NotificationChannelServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_notification_channel_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - NotificationChannelServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.NotificationChannelServiceGrpcTransport, - transports.NotificationChannelServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_notification_channel_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.NotificationChannelServiceGrpcTransport, - transports.NotificationChannelServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_notification_channel_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.NotificationChannelServiceGrpcTransport, grpc_helpers), - (transports.NotificationChannelServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_notification_channel_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "monitoring.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - scopes=["1", "2"], - default_host="monitoring.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.NotificationChannelServiceGrpcTransport, transports.NotificationChannelServiceGrpcAsyncIOTransport]) -def test_notification_channel_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_notification_channel_service_host_no_port(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), - ) - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_notification_channel_service_host_with_port(): - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), - ) - assert client.transport._host == 'monitoring.googleapis.com:8000' - -def test_notification_channel_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.NotificationChannelServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_notification_channel_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.NotificationChannelServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.NotificationChannelServiceGrpcTransport, transports.NotificationChannelServiceGrpcAsyncIOTransport]) -def test_notification_channel_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.NotificationChannelServiceGrpcTransport, transports.NotificationChannelServiceGrpcAsyncIOTransport]) -def test_notification_channel_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_notification_channel_path(): - project = "squid" - notification_channel = "clam" - expected = "projects/{project}/notificationChannels/{notification_channel}".format(project=project, notification_channel=notification_channel, ) - actual = NotificationChannelServiceClient.notification_channel_path(project, notification_channel) - assert expected == actual - - -def test_parse_notification_channel_path(): - expected = { - "project": "whelk", - "notification_channel": "octopus", - } - path = NotificationChannelServiceClient.notification_channel_path(**expected) - - # Check that the path construction is reversible. - actual = NotificationChannelServiceClient.parse_notification_channel_path(path) - assert expected == actual - -def test_notification_channel_descriptor_path(): - project = "oyster" - channel_descriptor = "nudibranch" - expected = "projects/{project}/notificationChannelDescriptors/{channel_descriptor}".format(project=project, channel_descriptor=channel_descriptor, ) - actual = NotificationChannelServiceClient.notification_channel_descriptor_path(project, channel_descriptor) - assert expected == actual - - -def test_parse_notification_channel_descriptor_path(): - expected = { - "project": "cuttlefish", - "channel_descriptor": "mussel", - } - path = NotificationChannelServiceClient.notification_channel_descriptor_path(**expected) - - # Check that the path construction is reversible. - actual = NotificationChannelServiceClient.parse_notification_channel_descriptor_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = NotificationChannelServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = NotificationChannelServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = NotificationChannelServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = NotificationChannelServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = NotificationChannelServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = NotificationChannelServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = NotificationChannelServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = NotificationChannelServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = NotificationChannelServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = NotificationChannelServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = NotificationChannelServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = NotificationChannelServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = NotificationChannelServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = NotificationChannelServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = NotificationChannelServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.NotificationChannelServiceTransport, '_prep_wrapped_messages') as prep: - client = NotificationChannelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.NotificationChannelServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = NotificationChannelServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_query_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_query_service.py deleted file mode 100644 index 54868cab..00000000 --- a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_query_service.py +++ /dev/null @@ -1,1264 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.monitoring_v3.services.query_service import QueryServiceAsyncClient -from google.cloud.monitoring_v3.services.query_service import QueryServiceClient -from google.cloud.monitoring_v3.services.query_service import pagers -from google.cloud.monitoring_v3.services.query_service import transports -from google.cloud.monitoring_v3.services.query_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.monitoring_v3.types import metric -from google.cloud.monitoring_v3.types import metric_service -from google.oauth2 import service_account -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert QueryServiceClient._get_default_mtls_endpoint(None) is None - assert QueryServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert QueryServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert QueryServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert QueryServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert QueryServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - QueryServiceClient, - QueryServiceAsyncClient, -]) -def test_query_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.QueryServiceGrpcTransport, "grpc"), - (transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_query_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - QueryServiceClient, - QueryServiceAsyncClient, -]) -def test_query_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds 
- assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_query_service_client_get_transport_class(): - transport = QueryServiceClient.get_transport_class() - available_transports = [ - transports.QueryServiceGrpcTransport, - ] - assert transport in available_transports - - transport = QueryServiceClient.get_transport_class("grpc") - assert transport == transports.QueryServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc"), - (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(QueryServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QueryServiceClient)) -@mock.patch.object(QueryServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QueryServiceAsyncClient)) -def test_query_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(QueryServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(QueryServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
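As an aside to the endpoint-selection cases above, the two endpoints involved can be read straight off the client class: GOOGLE_API_USE_MTLS_ENDPOINT="never" keeps the regular endpoint, "always" forces the mTLS endpoint, and "auto" picks mTLS only when a client certificate is available. A short sketch, with the expected values inferred from the host and mTLS tests in this file:

    from google.cloud.monitoring_v3.services.query_service import QueryServiceClient

    # Regular and mTLS API endpoints used by the endpoint auto-switching logic.
    assert QueryServiceClient.DEFAULT_ENDPOINT == "monitoring.googleapis.com"
    assert QueryServiceClient.DEFAULT_MTLS_ENDPOINT == "monitoring.mtls.googleapis.com"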
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc", "true"), - (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc", "false"), - (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(QueryServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QueryServiceClient)) -@mock.patch.object(QueryServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QueryServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_query_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc"), - (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_query_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (QueryServiceClient, transports.QueryServiceGrpcTransport, "grpc"), - (QueryServiceAsyncClient, transports.QueryServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_query_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_query_service_client_client_options_from_dict(): - with mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = QueryServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_query_time_series(transport: str = 'grpc', request_type=metric_service.QueryTimeSeriesRequest): - client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metric_service.QueryTimeSeriesResponse( - next_page_token='next_page_token_value', - ) - response = client.query_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.QueryTimeSeriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.QueryTimeSeriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_query_time_series_from_dict(): - test_query_time_series(request_type=dict) - - -def test_query_time_series_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_time_series), - '__call__') as call: - client.query_time_series() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.QueryTimeSeriesRequest() - - -@pytest.mark.asyncio -async def test_query_time_series_async(transport: str = 'grpc_asyncio', request_type=metric_service.QueryTimeSeriesRequest): - client = QueryServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.query_time_series), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metric_service.QueryTimeSeriesResponse( - next_page_token='next_page_token_value', - )) - response = await client.query_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metric_service.QueryTimeSeriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.QueryTimeSeriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_query_time_series_async_from_dict(): - await test_query_time_series_async(request_type=dict) - - -def test_query_time_series_field_headers(): - client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.QueryTimeSeriesRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_time_series), - '__call__') as call: - call.return_value = metric_service.QueryTimeSeriesResponse() - client.query_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_query_time_series_field_headers_async(): - client = QueryServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metric_service.QueryTimeSeriesRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_time_series), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metric_service.QueryTimeSeriesResponse()) - await client.query_time_series(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_query_time_series_pager(): - client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_time_series), - '__call__') as call: - # Set the response to a series of pages. 
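Outside of these mocked tests, the pager returned by query_time_series is typically consumed by plain iteration; it fetches follow-up pages transparently whenever a next_page_token is present. A sketch under the assumption that Application Default Credentials and a real project are available; the project ID and the MQL query string are placeholders.

    from google.cloud.monitoring_v3.services.query_service import QueryServiceClient

    client = QueryServiceClient()  # resolves Application Default Credentials
    pager = client.query_time_series(
        request={
            "name": "projects/my-project",  # placeholder project
            "query": "fetch gce_instance | metric 'compute.googleapis.com/instance/cpu/utilization'",
        }
    )
    for time_series_data in pager:  # walks page boundaries transparently
        print(time_series_data)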
- call.side_effect = ( - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - metric.TimeSeriesData(), - metric.TimeSeriesData(), - ], - next_page_token='abc', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[], - next_page_token='def', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - ], - next_page_token='ghi', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - metric.TimeSeriesData(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.query_time_series(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, metric.TimeSeriesData) - for i in results) - -def test_query_time_series_pages(): - client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_time_series), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - metric.TimeSeriesData(), - metric.TimeSeriesData(), - ], - next_page_token='abc', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[], - next_page_token='def', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - ], - next_page_token='ghi', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - metric.TimeSeriesData(), - ], - ), - RuntimeError, - ) - pages = list(client.query_time_series(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_query_time_series_async_pager(): - client = QueryServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_time_series), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - metric.TimeSeriesData(), - metric.TimeSeriesData(), - ], - next_page_token='abc', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[], - next_page_token='def', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - ], - next_page_token='ghi', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - metric.TimeSeriesData(), - ], - ), - RuntimeError, - ) - async_pager = await client.query_time_series(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metric.TimeSeriesData) - for i in responses) - -@pytest.mark.asyncio -async def test_query_time_series_async_pages(): - client = QueryServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.query_time_series), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - metric.TimeSeriesData(), - metric.TimeSeriesData(), - ], - next_page_token='abc', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[], - next_page_token='def', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - ], - next_page_token='ghi', - ), - metric_service.QueryTimeSeriesResponse( - time_series_data=[ - metric.TimeSeriesData(), - metric.TimeSeriesData(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.query_time_series(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.QueryServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.QueryServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = QueryServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.QueryServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = QueryServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.QueryServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = QueryServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.QueryServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.QueryServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.QueryServiceGrpcTransport, - transports.QueryServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
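The transport-instance tests above boil down to the following usage pattern: when a pre-configured transport is handed to the client, credentials, a credentials file, or scopes must be supplied on the transport rather than alongside it on the client (the mixed combinations above raise ValueError). A minimal sketch using anonymous credentials:

    from google.auth import credentials as ga_credentials
    from google.cloud.monitoring_v3.services.query_service import QueryServiceClient, transports

    transport = transports.QueryServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = QueryServiceClient(transport=transport)
    assert client.transport is transport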
- client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.QueryServiceGrpcTransport, - ) - -def test_query_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.QueryServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_query_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.QueryServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'query_time_series', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_query_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.QueryServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_query_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.QueryServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ), - quota_project_id="octopus", - ) - - -def test_query_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.query_service.transports.QueryServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.QueryServiceTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_query_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - QueryServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_query_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - QueryServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.QueryServiceGrpcTransport, - transports.QueryServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_query_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.QueryServiceGrpcTransport, - transports.QueryServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_query_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.QueryServiceGrpcTransport, grpc_helpers), - (transports.QueryServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_query_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
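The scope assertions above correspond to what the transport asks for when it falls back to Application Default Credentials. A sketch of requesting the same scopes directly, assuming ADC is configured in the environment:

    import google.auth

    # Default scopes requested by the monitoring transports (see the assertions above).
    credentials, project_id = google.auth.default(
        scopes=[
            "https://www.googleapis.com/auth/cloud-platform",
            "https://www.googleapis.com/auth/monitoring",
            "https://www.googleapis.com/auth/monitoring.read",
        ]
    )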
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "monitoring.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - scopes=["1", "2"], - default_host="monitoring.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.QueryServiceGrpcTransport, transports.QueryServiceGrpcAsyncIOTransport]) -def test_query_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_query_service_host_no_port(): - client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), - ) - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_query_service_host_with_port(): - client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), - ) - assert client.transport._host == 'monitoring.googleapis.com:8000' - -def test_query_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.QueryServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_query_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.QueryServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.QueryServiceGrpcTransport, transports.QueryServiceGrpcAsyncIOTransport]) -def test_query_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.QueryServiceGrpcTransport, transports.QueryServiceGrpcAsyncIOTransport]) -def test_query_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = QueryServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = QueryServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = QueryServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = QueryServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = QueryServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = QueryServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = QueryServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = QueryServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = QueryServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = QueryServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = QueryServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = QueryServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = QueryServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = QueryServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = QueryServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.QueryServiceTransport, '_prep_wrapped_messages') as prep: - client = QueryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.QueryServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = QueryServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py deleted file mode 100644 index e1cbc2e9..00000000 --- a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_service_monitoring_service.py +++ /dev/null @@ -1,3703 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
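One more aside on the path helpers exercised in the query-service tests above: the common_*_path classmethods are plain string formatters with parse_* inverses, so they can be used (and checked) without credentials or network access. The IDs below are placeholders.

    from google.cloud.monitoring_v3.services.query_service import QueryServiceClient

    path = QueryServiceClient.common_location_path("my-project", "us-central1")
    assert path == "projects/my-project/locations/us-central1"
    assert QueryServiceClient.parse_common_location_path(path) == {
        "project": "my-project",
        "location": "us-central1",
    }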
-# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.monitoring_v3.services.service_monitoring_service import ServiceMonitoringServiceAsyncClient -from google.cloud.monitoring_v3.services.service_monitoring_service import ServiceMonitoringServiceClient -from google.cloud.monitoring_v3.services.service_monitoring_service import pagers -from google.cloud.monitoring_v3.services.service_monitoring_service import transports -from google.cloud.monitoring_v3.services.service_monitoring_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.monitoring_v3.types import service -from google.cloud.monitoring_v3.types import service as gm_service -from google.cloud.monitoring_v3.types import service_service -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.type import calendar_period_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(None) is None - assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert ServiceMonitoringServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - ServiceMonitoringServiceClient, - ServiceMonitoringServiceAsyncClient, -]) -def test_service_monitoring_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ServiceMonitoringServiceGrpcTransport, "grpc"), - (transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_service_monitoring_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - ServiceMonitoringServiceClient, - ServiceMonitoringServiceAsyncClient, -]) -def test_service_monitoring_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_service_monitoring_service_client_get_transport_class(): - transport = ServiceMonitoringServiceClient.get_transport_class() - available_transports = [ - transports.ServiceMonitoringServiceGrpcTransport, - ] - 
assert transport in available_transports - - transport = ServiceMonitoringServiceClient.get_transport_class("grpc") - assert transport == transports.ServiceMonitoringServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc"), - (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(ServiceMonitoringServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceMonitoringServiceClient)) -@mock.patch.object(ServiceMonitoringServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceMonitoringServiceAsyncClient)) -def test_service_monitoring_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(ServiceMonitoringServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(ServiceMonitoringServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc", "true"), - (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc", "false"), - (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(ServiceMonitoringServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceMonitoringServiceClient)) -@mock.patch.object(ServiceMonitoringServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceMonitoringServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_service_monitoring_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc"), - (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_service_monitoring_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ServiceMonitoringServiceClient, transports.ServiceMonitoringServiceGrpcTransport, "grpc"), - (ServiceMonitoringServiceAsyncClient, transports.ServiceMonitoringServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_service_monitoring_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_service_monitoring_service_client_client_options_from_dict(): - with mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = ServiceMonitoringServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_create_service(transport: str = 'grpc', request_type=service_service.CreateServiceRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_service.Service( - name='name_value', - display_name='display_name_value', - custom=None, - ) - response = client.create_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.CreateServiceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gm_service.Service) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -def test_create_service_from_dict(): - test_create_service(request_type=dict) - - -def test_create_service_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service), - '__call__') as call: - client.create_service() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.CreateServiceRequest() - - -@pytest.mark.asyncio -async def test_create_service_async(transport: str = 'grpc_asyncio', request_type=service_service.CreateServiceRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service( - name='name_value', - display_name='display_name_value', - )) - response = await client.create_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.CreateServiceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gm_service.Service) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -@pytest.mark.asyncio -async def test_create_service_async_from_dict(): - await test_create_service_async(request_type=dict) - - -def test_create_service_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.CreateServiceRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service), - '__call__') as call: - call.return_value = gm_service.Service() - client.create_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_service_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.CreateServiceRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service()) - await client.create_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_service_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_service.Service() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_service( - parent='parent_value', - service=gm_service.Service(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].service == gm_service.Service(name='name_value') - - -def test_create_service_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_service( - service_service.CreateServiceRequest(), - parent='parent_value', - service=gm_service.Service(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_service_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_service.Service() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_service( - parent='parent_value', - service=gm_service.Service(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].service == gm_service.Service(name='name_value') - - -@pytest.mark.asyncio -async def test_create_service_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_service( - service_service.CreateServiceRequest(), - parent='parent_value', - service=gm_service.Service(name='name_value'), - ) - - -def test_get_service(transport: str = 'grpc', request_type=service_service.GetServiceRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.Service( - name='name_value', - display_name='display_name_value', - custom=None, - ) - response = client.get_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.GetServiceRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, service.Service) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -def test_get_service_from_dict(): - test_get_service(request_type=dict) - - -def test_get_service_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service), - '__call__') as call: - client.get_service() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.GetServiceRequest() - - -@pytest.mark.asyncio -async def test_get_service_async(transport: str = 'grpc_asyncio', request_type=service_service.GetServiceRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.Service( - name='name_value', - display_name='display_name_value', - )) - response = await client.get_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.GetServiceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, service.Service) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -@pytest.mark.asyncio -async def test_get_service_async_from_dict(): - await test_get_service_async(request_type=dict) - - -def test_get_service_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.GetServiceRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service), - '__call__') as call: - call.return_value = service.Service() - client.get_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_service_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.GetServiceRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_service), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Service()) - await client.get_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_service_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.Service() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_service( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_service_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_service( - service_service.GetServiceRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_service_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.Service() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Service()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_service( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_service_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_service( - service_service.GetServiceRequest(), - name='name_value', - ) - - -def test_list_services(transport: str = 'grpc', request_type=service_service.ListServicesRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = service_service.ListServicesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_services(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.ListServicesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListServicesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_services_from_dict(): - test_list_services(request_type=dict) - - -def test_list_services_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__') as call: - client.list_services() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.ListServicesRequest() - - -@pytest.mark.asyncio -async def test_list_services_async(transport: str = 'grpc_asyncio', request_type=service_service.ListServicesRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServicesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_services(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.ListServicesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListServicesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_services_async_from_dict(): - await test_list_services_async(request_type=dict) - - -def test_list_services_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.ListServicesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__') as call: - call.return_value = service_service.ListServicesResponse() - client.list_services(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_services_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.ListServicesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServicesResponse()) - await client.list_services(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_services_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service_service.ListServicesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_services( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_services_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_services( - service_service.ListServicesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_services_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service_service.ListServicesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServicesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_services( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_services_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_services( - service_service.ListServicesRequest(), - parent='parent_value', - ) - - -def test_list_services_pager(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service_service.ListServicesResponse( - services=[ - service.Service(), - service.Service(), - service.Service(), - ], - next_page_token='abc', - ), - service_service.ListServicesResponse( - services=[], - next_page_token='def', - ), - service_service.ListServicesResponse( - services=[ - service.Service(), - ], - next_page_token='ghi', - ), - service_service.ListServicesResponse( - services=[ - service.Service(), - service.Service(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_services(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, service.Service) - for i in results) - -def test_list_services_pages(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service_service.ListServicesResponse( - services=[ - service.Service(), - service.Service(), - service.Service(), - ], - next_page_token='abc', - ), - service_service.ListServicesResponse( - services=[], - next_page_token='def', - ), - service_service.ListServicesResponse( - services=[ - service.Service(), - ], - next_page_token='ghi', - ), - service_service.ListServicesResponse( - services=[ - service.Service(), - service.Service(), - ], - ), - RuntimeError, - ) - pages = list(client.list_services(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_services_async_pager(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service_service.ListServicesResponse( - services=[ - service.Service(), - service.Service(), - service.Service(), - ], - next_page_token='abc', - ), - service_service.ListServicesResponse( - services=[], - next_page_token='def', - ), - service_service.ListServicesResponse( - services=[ - service.Service(), - ], - next_page_token='ghi', - ), - service_service.ListServicesResponse( - services=[ - service.Service(), - service.Service(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_services(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, service.Service) - for i in responses) - -@pytest.mark.asyncio -async def test_list_services_async_pages(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_services), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service_service.ListServicesResponse( - services=[ - service.Service(), - service.Service(), - service.Service(), - ], - next_page_token='abc', - ), - service_service.ListServicesResponse( - services=[], - next_page_token='def', - ), - service_service.ListServicesResponse( - services=[ - service.Service(), - ], - next_page_token='ghi', - ), - service_service.ListServicesResponse( - services=[ - service.Service(), - service.Service(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_services(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_update_service(transport: str = 'grpc', request_type=service_service.UpdateServiceRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_service.Service( - name='name_value', - display_name='display_name_value', - custom=None, - ) - response = client.update_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.UpdateServiceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gm_service.Service) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -def test_update_service_from_dict(): - test_update_service(request_type=dict) - - -def test_update_service_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_service), - '__call__') as call: - client.update_service() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.UpdateServiceRequest() - - -@pytest.mark.asyncio -async def test_update_service_async(transport: str = 'grpc_asyncio', request_type=service_service.UpdateServiceRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service( - name='name_value', - display_name='display_name_value', - )) - response = await client.update_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.UpdateServiceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gm_service.Service) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -@pytest.mark.asyncio -async def test_update_service_async_from_dict(): - await test_update_service_async(request_type=dict) - - -def test_update_service_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.UpdateServiceRequest() - - request.service.name = 'service.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service), - '__call__') as call: - call.return_value = gm_service.Service() - client.update_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'service.name=service.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_service_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.UpdateServiceRequest() - - request.service.name = 'service.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service()) - await client.update_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'service.name=service.name/value', - ) in kw['metadata'] - - -def test_update_service_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_service.Service() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_service( - service=gm_service.Service(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].service == gm_service.Service(name='name_value') - - -def test_update_service_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_service( - service_service.UpdateServiceRequest(), - service=gm_service.Service(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_update_service_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gm_service.Service() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gm_service.Service()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_service( - service=gm_service.Service(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].service == gm_service.Service(name='name_value') - - -@pytest.mark.asyncio -async def test_update_service_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_service( - service_service.UpdateServiceRequest(), - service=gm_service.Service(name='name_value'), - ) - - -def test_delete_service(transport: str = 'grpc', request_type=service_service.DeleteServiceRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_service(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.DeleteServiceRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_service_from_dict(): - test_delete_service(request_type=dict) - - -def test_delete_service_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service), - '__call__') as call: - client.delete_service() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.DeleteServiceRequest() - - -@pytest.mark.asyncio -async def test_delete_service_async(transport: str = 'grpc_asyncio', request_type=service_service.DeleteServiceRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.DeleteServiceRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_service_async_from_dict(): - await test_delete_service_async(request_type=dict) - - -def test_delete_service_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.DeleteServiceRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service), - '__call__') as call: - call.return_value = None - client.delete_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_service_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.DeleteServiceRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_service), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_service_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_service( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_service_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_service( - service_service.DeleteServiceRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_service_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_service( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_service_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_service( - service_service.DeleteServiceRequest(), - name='name_value', - ) - - -def test_create_service_level_objective(transport: str = 'grpc', request_type=service_service.CreateServiceLevelObjectiveRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ServiceLevelObjective( - name='name_value', - display_name='display_name_value', - goal=0.419, - rolling_period=duration_pb2.Duration(seconds=751), - ) - response = client.create_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.CreateServiceLevelObjectiveRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, service.ServiceLevelObjective) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert math.isclose(response.goal, 0.419, rel_tol=1e-6) - - -def test_create_service_level_objective_from_dict(): - test_create_service_level_objective(request_type=dict) - - -def test_create_service_level_objective_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service_level_objective), - '__call__') as call: - client.create_service_level_objective() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.CreateServiceLevelObjectiveRequest() - - -@pytest.mark.asyncio -async def test_create_service_level_objective_async(transport: str = 'grpc_asyncio', request_type=service_service.CreateServiceLevelObjectiveRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective( - name='name_value', - display_name='display_name_value', - goal=0.419, - )) - response = await client.create_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.CreateServiceLevelObjectiveRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, service.ServiceLevelObjective) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert math.isclose(response.goal, 0.419, rel_tol=1e-6) - - -@pytest.mark.asyncio -async def test_create_service_level_objective_async_from_dict(): - await test_create_service_level_objective_async(request_type=dict) - - -def test_create_service_level_objective_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = service_service.CreateServiceLevelObjectiveRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service_level_objective), - '__call__') as call: - call.return_value = service.ServiceLevelObjective() - client.create_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_service_level_objective_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.CreateServiceLevelObjectiveRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service_level_objective), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) - await client.create_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_service_level_objective_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ServiceLevelObjective() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_service_level_objective( - parent='parent_value', - service_level_objective=service.ServiceLevelObjective(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].service_level_objective == service.ServiceLevelObjective(name='name_value') - - -def test_create_service_level_objective_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_service_level_objective( - service_service.CreateServiceLevelObjectiveRequest(), - parent='parent_value', - service_level_objective=service.ServiceLevelObjective(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_service_level_objective_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ServiceLevelObjective() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_service_level_objective( - parent='parent_value', - service_level_objective=service.ServiceLevelObjective(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].service_level_objective == service.ServiceLevelObjective(name='name_value') - - -@pytest.mark.asyncio -async def test_create_service_level_objective_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_service_level_objective( - service_service.CreateServiceLevelObjectiveRequest(), - parent='parent_value', - service_level_objective=service.ServiceLevelObjective(name='name_value'), - ) - - -def test_get_service_level_objective(transport: str = 'grpc', request_type=service_service.GetServiceLevelObjectiveRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ServiceLevelObjective( - name='name_value', - display_name='display_name_value', - goal=0.419, - rolling_period=duration_pb2.Duration(seconds=751), - ) - response = client.get_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.GetServiceLevelObjectiveRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, service.ServiceLevelObjective) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert math.isclose(response.goal, 0.419, rel_tol=1e-6) - - -def test_get_service_level_objective_from_dict(): - test_get_service_level_objective(request_type=dict) - - -def test_get_service_level_objective_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_service_level_objective), - '__call__') as call: - client.get_service_level_objective() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.GetServiceLevelObjectiveRequest() - - -@pytest.mark.asyncio -async def test_get_service_level_objective_async(transport: str = 'grpc_asyncio', request_type=service_service.GetServiceLevelObjectiveRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective( - name='name_value', - display_name='display_name_value', - goal=0.419, - )) - response = await client.get_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.GetServiceLevelObjectiveRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, service.ServiceLevelObjective) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert math.isclose(response.goal, 0.419, rel_tol=1e-6) - - -@pytest.mark.asyncio -async def test_get_service_level_objective_async_from_dict(): - await test_get_service_level_objective_async(request_type=dict) - - -def test_get_service_level_objective_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.GetServiceLevelObjectiveRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service_level_objective), - '__call__') as call: - call.return_value = service.ServiceLevelObjective() - client.get_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_service_level_objective_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.GetServiceLevelObjectiveRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_service_level_objective), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) - await client.get_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_service_level_objective_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ServiceLevelObjective() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_service_level_objective( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_service_level_objective_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_service_level_objective( - service_service.GetServiceLevelObjectiveRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_service_level_objective_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ServiceLevelObjective() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_service_level_objective( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_service_level_objective_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_service_level_objective( - service_service.GetServiceLevelObjectiveRequest(), - name='name_value', - ) - - -def test_list_service_level_objectives(transport: str = 'grpc', request_type=service_service.ListServiceLevelObjectivesRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service_service.ListServiceLevelObjectivesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_service_level_objectives(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.ListServiceLevelObjectivesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListServiceLevelObjectivesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_service_level_objectives_from_dict(): - test_list_service_level_objectives(request_type=dict) - - -def test_list_service_level_objectives_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__') as call: - client.list_service_level_objectives() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.ListServiceLevelObjectivesRequest() - - -@pytest.mark.asyncio -async def test_list_service_level_objectives_async(transport: str = 'grpc_asyncio', request_type=service_service.ListServiceLevelObjectivesRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServiceLevelObjectivesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_service_level_objectives(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.ListServiceLevelObjectivesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListServiceLevelObjectivesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_service_level_objectives_async_from_dict(): - await test_list_service_level_objectives_async(request_type=dict) - - -def test_list_service_level_objectives_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.ListServiceLevelObjectivesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__') as call: - call.return_value = service_service.ListServiceLevelObjectivesResponse() - client.list_service_level_objectives(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_service_level_objectives_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.ListServiceLevelObjectivesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServiceLevelObjectivesResponse()) - await client.list_service_level_objectives(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_service_level_objectives_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service_service.ListServiceLevelObjectivesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_service_level_objectives( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_service_level_objectives_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_service_level_objectives( - service_service.ListServiceLevelObjectivesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_service_level_objectives_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service_service.ListServiceLevelObjectivesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_service.ListServiceLevelObjectivesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_service_level_objectives( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_service_level_objectives_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_service_level_objectives( - service_service.ListServiceLevelObjectivesRequest(), - parent='parent_value', - ) - - -def test_list_service_level_objectives_pager(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - ], - next_page_token='abc', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[], - next_page_token='def', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - ], - next_page_token='ghi', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_service_level_objectives(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, service.ServiceLevelObjective) - for i in results) - -def test_list_service_level_objectives_pages(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - ], - next_page_token='abc', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[], - next_page_token='def', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - ], - next_page_token='ghi', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - ], - ), - RuntimeError, - ) - pages = list(client.list_service_level_objectives(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_service_level_objectives_async_pager(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - ], - next_page_token='abc', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[], - next_page_token='def', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - ], - next_page_token='ghi', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_service_level_objectives(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, service.ServiceLevelObjective) - for i in responses) - -@pytest.mark.asyncio -async def test_list_service_level_objectives_async_pages(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_service_level_objectives), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - ], - next_page_token='abc', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[], - next_page_token='def', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - ], - next_page_token='ghi', - ), - service_service.ListServiceLevelObjectivesResponse( - service_level_objectives=[ - service.ServiceLevelObjective(), - service.ServiceLevelObjective(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_service_level_objectives(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_update_service_level_objective(transport: str = 'grpc', request_type=service_service.UpdateServiceLevelObjectiveRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ServiceLevelObjective( - name='name_value', - display_name='display_name_value', - goal=0.419, - rolling_period=duration_pb2.Duration(seconds=751), - ) - response = client.update_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.UpdateServiceLevelObjectiveRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, service.ServiceLevelObjective) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert math.isclose(response.goal, 0.419, rel_tol=1e-6) - - -def test_update_service_level_objective_from_dict(): - test_update_service_level_objective(request_type=dict) - - -def test_update_service_level_objective_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service_level_objective), - '__call__') as call: - client.update_service_level_objective() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.UpdateServiceLevelObjectiveRequest() - - -@pytest.mark.asyncio -async def test_update_service_level_objective_async(transport: str = 'grpc_asyncio', request_type=service_service.UpdateServiceLevelObjectiveRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective( - name='name_value', - display_name='display_name_value', - goal=0.419, - )) - response = await client.update_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.UpdateServiceLevelObjectiveRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, service.ServiceLevelObjective) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert math.isclose(response.goal, 0.419, rel_tol=1e-6) - - -@pytest.mark.asyncio -async def test_update_service_level_objective_async_from_dict(): - await test_update_service_level_objective_async(request_type=dict) - - -def test_update_service_level_objective_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.UpdateServiceLevelObjectiveRequest() - - request.service_level_objective.name = 'service_level_objective.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service_level_objective), - '__call__') as call: - call.return_value = service.ServiceLevelObjective() - client.update_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'service_level_objective.name=service_level_objective.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_service_level_objective_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.UpdateServiceLevelObjectiveRequest() - - request.service_level_objective.name = 'service_level_objective.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service_level_objective), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) - await client.update_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'service_level_objective.name=service_level_objective.name/value', - ) in kw['metadata'] - - -def test_update_service_level_objective_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ServiceLevelObjective() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_service_level_objective( - service_level_objective=service.ServiceLevelObjective(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].service_level_objective == service.ServiceLevelObjective(name='name_value') - - -def test_update_service_level_objective_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_service_level_objective( - service_service.UpdateServiceLevelObjectiveRequest(), - service_level_objective=service.ServiceLevelObjective(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_update_service_level_objective_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ServiceLevelObjective() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ServiceLevelObjective()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_service_level_objective( - service_level_objective=service.ServiceLevelObjective(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].service_level_objective == service.ServiceLevelObjective(name='name_value') - - -@pytest.mark.asyncio -async def test_update_service_level_objective_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_service_level_objective( - service_service.UpdateServiceLevelObjectiveRequest(), - service_level_objective=service.ServiceLevelObjective(name='name_value'), - ) - - -def test_delete_service_level_objective(transport: str = 'grpc', request_type=service_service.DeleteServiceLevelObjectiveRequest): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.DeleteServiceLevelObjectiveRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_service_level_objective_from_dict(): - test_delete_service_level_objective(request_type=dict) - - -def test_delete_service_level_objective_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service_level_objective), - '__call__') as call: - client.delete_service_level_objective() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.DeleteServiceLevelObjectiveRequest() - - -@pytest.mark.asyncio -async def test_delete_service_level_objective_async(transport: str = 'grpc_asyncio', request_type=service_service.DeleteServiceLevelObjectiveRequest): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == service_service.DeleteServiceLevelObjectiveRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_service_level_objective_async_from_dict(): - await test_delete_service_level_objective_async(request_type=dict) - - -def test_delete_service_level_objective_field_headers(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.DeleteServiceLevelObjectiveRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service_level_objective), - '__call__') as call: - call.return_value = None - client.delete_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_service_level_objective_field_headers_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service_service.DeleteServiceLevelObjectiveRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service_level_objective), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_service_level_objective(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_service_level_objective_flattened(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_service_level_objective( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_service_level_objective_flattened_error(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_service_level_objective( - service_service.DeleteServiceLevelObjectiveRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_service_level_objective_flattened_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_service_level_objective), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_service_level_objective( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_service_level_objective_flattened_error_async(): - client = ServiceMonitoringServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_service_level_objective( - service_service.DeleteServiceLevelObjectiveRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ServiceMonitoringServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ServiceMonitoringServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ServiceMonitoringServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ServiceMonitoringServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ServiceMonitoringServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ServiceMonitoringServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ServiceMonitoringServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ServiceMonitoringServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ServiceMonitoringServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ServiceMonitoringServiceGrpcTransport, - transports.ServiceMonitoringServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ServiceMonitoringServiceGrpcTransport, - ) - -def test_service_monitoring_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ServiceMonitoringServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_service_monitoring_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.ServiceMonitoringServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_service', - 'get_service', - 'list_services', - 'update_service', - 'delete_service', - 'create_service_level_objective', - 'get_service_level_objective', - 'list_service_level_objectives', - 'update_service_level_objective', - 'delete_service_level_objective', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_service_monitoring_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ServiceMonitoringServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_service_monitoring_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ServiceMonitoringServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ), - quota_project_id="octopus", - ) - - -def test_service_monitoring_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.service_monitoring_service.transports.ServiceMonitoringServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ServiceMonitoringServiceTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_service_monitoring_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ServiceMonitoringServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_service_monitoring_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ServiceMonitoringServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ServiceMonitoringServiceGrpcTransport, - transports.ServiceMonitoringServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_service_monitoring_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ServiceMonitoringServiceGrpcTransport, - transports.ServiceMonitoringServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_service_monitoring_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ServiceMonitoringServiceGrpcTransport, grpc_helpers), - (transports.ServiceMonitoringServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_service_monitoring_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "monitoring.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - scopes=["1", "2"], - default_host="monitoring.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.ServiceMonitoringServiceGrpcTransport, transports.ServiceMonitoringServiceGrpcAsyncIOTransport]) -def test_service_monitoring_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_service_monitoring_service_host_no_port(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), - ) - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_service_monitoring_service_host_with_port(): - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), - ) - assert client.transport._host == 'monitoring.googleapis.com:8000' - -def test_service_monitoring_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.ServiceMonitoringServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_service_monitoring_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ServiceMonitoringServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.ServiceMonitoringServiceGrpcTransport, transports.ServiceMonitoringServiceGrpcAsyncIOTransport]) -def test_service_monitoring_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.ServiceMonitoringServiceGrpcTransport, transports.ServiceMonitoringServiceGrpcAsyncIOTransport]) -def test_service_monitoring_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_service_path(): - project = "squid" - service = "clam" - expected = "projects/{project}/services/{service}".format(project=project, service=service, ) - actual = ServiceMonitoringServiceClient.service_path(project, service) - assert expected == actual - - -def test_parse_service_path(): - expected = { - "project": "whelk", - "service": "octopus", - } - path = ServiceMonitoringServiceClient.service_path(**expected) - - # Check that the path construction is reversible. - actual = ServiceMonitoringServiceClient.parse_service_path(path) - assert expected == actual - -def test_service_level_objective_path(): - project = "oyster" - service = "nudibranch" - service_level_objective = "cuttlefish" - expected = "projects/{project}/services/{service}/serviceLevelObjectives/{service_level_objective}".format(project=project, service=service, service_level_objective=service_level_objective, ) - actual = ServiceMonitoringServiceClient.service_level_objective_path(project, service, service_level_objective) - assert expected == actual - - -def test_parse_service_level_objective_path(): - expected = { - "project": "mussel", - "service": "winkle", - "service_level_objective": "nautilus", - } - path = ServiceMonitoringServiceClient.service_level_objective_path(**expected) - - # Check that the path construction is reversible. - actual = ServiceMonitoringServiceClient.parse_service_level_objective_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = ServiceMonitoringServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = ServiceMonitoringServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = ServiceMonitoringServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = ServiceMonitoringServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = ServiceMonitoringServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ServiceMonitoringServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = ServiceMonitoringServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = ServiceMonitoringServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ServiceMonitoringServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = ServiceMonitoringServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = ServiceMonitoringServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ServiceMonitoringServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ServiceMonitoringServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = ServiceMonitoringServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ServiceMonitoringServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.ServiceMonitoringServiceTransport, '_prep_wrapped_messages') as prep: - client = ServiceMonitoringServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.ServiceMonitoringServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = ServiceMonitoringServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py b/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py deleted file mode 100644 index 54e5cb8c..00000000 --- a/owl-bot-staging/v3/tests/unit/gapic/monitoring_v3/test_uptime_check_service.py +++ /dev/null @@ -1,2610 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api import monitored_resource_pb2 # type: ignore -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.monitoring_v3.services.uptime_check_service import UptimeCheckServiceAsyncClient -from google.cloud.monitoring_v3.services.uptime_check_service import UptimeCheckServiceClient -from google.cloud.monitoring_v3.services.uptime_check_service import pagers -from google.cloud.monitoring_v3.services.uptime_check_service import transports -from google.cloud.monitoring_v3.services.uptime_check_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.monitoring_v3.types import uptime -from google.cloud.monitoring_v3.types import uptime_service -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert UptimeCheckServiceClient._get_default_mtls_endpoint(None) is None - assert UptimeCheckServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert UptimeCheckServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert UptimeCheckServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert UptimeCheckServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert UptimeCheckServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - UptimeCheckServiceClient, - UptimeCheckServiceAsyncClient, -]) -def test_uptime_check_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.UptimeCheckServiceGrpcTransport, "grpc"), - (transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_uptime_check_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - UptimeCheckServiceClient, - UptimeCheckServiceAsyncClient, -]) -def test_uptime_check_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = 
client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_uptime_check_service_client_get_transport_class(): - transport = UptimeCheckServiceClient.get_transport_class() - available_transports = [ - transports.UptimeCheckServiceGrpcTransport, - ] - assert transport in available_transports - - transport = UptimeCheckServiceClient.get_transport_class("grpc") - assert transport == transports.UptimeCheckServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc"), - (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(UptimeCheckServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UptimeCheckServiceClient)) -@mock.patch.object(UptimeCheckServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UptimeCheckServiceAsyncClient)) -def test_uptime_check_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(UptimeCheckServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(UptimeCheckServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc", "true"), - (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc", "false"), - (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(UptimeCheckServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UptimeCheckServiceClient)) -@mock.patch.object(UptimeCheckServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UptimeCheckServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_uptime_check_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc"), - (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_uptime_check_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (UptimeCheckServiceClient, transports.UptimeCheckServiceGrpcTransport, "grpc"), - (UptimeCheckServiceAsyncClient, transports.UptimeCheckServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_uptime_check_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_uptime_check_service_client_client_options_from_dict(): - with mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = UptimeCheckServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_list_uptime_check_configs(transport: str = 'grpc', request_type=uptime_service.ListUptimeCheckConfigsRequest): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime_service.ListUptimeCheckConfigsResponse( - next_page_token='next_page_token_value', - total_size=1086, - ) - response = client.list_uptime_check_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.ListUptimeCheckConfigsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListUptimeCheckConfigsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.total_size == 1086 - - -def test_list_uptime_check_configs_from_dict(): - test_list_uptime_check_configs(request_type=dict) - - -def test_list_uptime_check_configs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__') as call: - client.list_uptime_check_configs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.ListUptimeCheckConfigsRequest() - - -@pytest.mark.asyncio -async def test_list_uptime_check_configs_async(transport: str = 'grpc_asyncio', request_type=uptime_service.ListUptimeCheckConfigsRequest): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime_service.ListUptimeCheckConfigsResponse( - next_page_token='next_page_token_value', - total_size=1086, - )) - response = await client.list_uptime_check_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.ListUptimeCheckConfigsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListUptimeCheckConfigsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.total_size == 1086 - - -@pytest.mark.asyncio -async def test_list_uptime_check_configs_async_from_dict(): - await test_list_uptime_check_configs_async(request_type=dict) - - -def test_list_uptime_check_configs_field_headers(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.ListUptimeCheckConfigsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__') as call: - call.return_value = uptime_service.ListUptimeCheckConfigsResponse() - client.list_uptime_check_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_uptime_check_configs_field_headers_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.ListUptimeCheckConfigsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime_service.ListUptimeCheckConfigsResponse()) - await client.list_uptime_check_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_uptime_check_configs_flattened(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime_service.ListUptimeCheckConfigsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_uptime_check_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_uptime_check_configs_flattened_error(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_uptime_check_configs( - uptime_service.ListUptimeCheckConfigsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_uptime_check_configs_flattened_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime_service.ListUptimeCheckConfigsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime_service.ListUptimeCheckConfigsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_uptime_check_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_uptime_check_configs_flattened_error_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_uptime_check_configs( - uptime_service.ListUptimeCheckConfigsRequest(), - parent='parent_value', - ) - - -def test_list_uptime_check_configs_pager(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - ], - next_page_token='abc', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[], - next_page_token='def', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - ], - next_page_token='ghi', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_uptime_check_configs(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, uptime.UptimeCheckConfig) - for i in results) - -def test_list_uptime_check_configs_pages(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - ], - next_page_token='abc', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[], - next_page_token='def', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - ], - next_page_token='ghi', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - ], - ), - RuntimeError, - ) - pages = list(client.list_uptime_check_configs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_uptime_check_configs_async_pager(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - ], - next_page_token='abc', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[], - next_page_token='def', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - ], - next_page_token='ghi', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_uptime_check_configs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, uptime.UptimeCheckConfig) - for i in responses) - -@pytest.mark.asyncio -async def test_list_uptime_check_configs_async_pages(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - ], - next_page_token='abc', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[], - next_page_token='def', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - ], - next_page_token='ghi', - ), - uptime_service.ListUptimeCheckConfigsResponse( - uptime_check_configs=[ - uptime.UptimeCheckConfig(), - uptime.UptimeCheckConfig(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_uptime_check_configs(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_uptime_check_config(transport: str = 'grpc', request_type=uptime_service.GetUptimeCheckConfigRequest): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime.UptimeCheckConfig( - name='name_value', - display_name='display_name_value', - selected_regions=[uptime.UptimeCheckRegion.USA], - is_internal=True, - monitored_resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), - http_check=uptime.UptimeCheckConfig.HttpCheck(request_method=uptime.UptimeCheckConfig.HttpCheck.RequestMethod.GET), - ) - response = client.get_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.GetUptimeCheckConfigRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, uptime.UptimeCheckConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.selected_regions == [uptime.UptimeCheckRegion.USA] - assert response.is_internal is True - - -def test_get_uptime_check_config_from_dict(): - test_get_uptime_check_config(request_type=dict) - - -def test_get_uptime_check_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_uptime_check_config), - '__call__') as call: - client.get_uptime_check_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.GetUptimeCheckConfigRequest() - - -@pytest.mark.asyncio -async def test_get_uptime_check_config_async(transport: str = 'grpc_asyncio', request_type=uptime_service.GetUptimeCheckConfigRequest): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig( - name='name_value', - display_name='display_name_value', - selected_regions=[uptime.UptimeCheckRegion.USA], - is_internal=True, - )) - response = await client.get_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.GetUptimeCheckConfigRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, uptime.UptimeCheckConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.selected_regions == [uptime.UptimeCheckRegion.USA] - assert response.is_internal is True - - -@pytest.mark.asyncio -async def test_get_uptime_check_config_async_from_dict(): - await test_get_uptime_check_config_async(request_type=dict) - - -def test_get_uptime_check_config_field_headers(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.GetUptimeCheckConfigRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_uptime_check_config), - '__call__') as call: - call.return_value = uptime.UptimeCheckConfig() - client.get_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_uptime_check_config_field_headers_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.GetUptimeCheckConfigRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_uptime_check_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) - await client.get_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_uptime_check_config_flattened(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime.UptimeCheckConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_uptime_check_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_uptime_check_config_flattened_error(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_uptime_check_config( - uptime_service.GetUptimeCheckConfigRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_uptime_check_config_flattened_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime.UptimeCheckConfig() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_uptime_check_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_uptime_check_config_flattened_error_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_uptime_check_config( - uptime_service.GetUptimeCheckConfigRequest(), - name='name_value', - ) - - -def test_create_uptime_check_config(transport: str = 'grpc', request_type=uptime_service.CreateUptimeCheckConfigRequest): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime.UptimeCheckConfig( - name='name_value', - display_name='display_name_value', - selected_regions=[uptime.UptimeCheckRegion.USA], - is_internal=True, - monitored_resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), - http_check=uptime.UptimeCheckConfig.HttpCheck(request_method=uptime.UptimeCheckConfig.HttpCheck.RequestMethod.GET), - ) - response = client.create_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.CreateUptimeCheckConfigRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, uptime.UptimeCheckConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.selected_regions == [uptime.UptimeCheckRegion.USA] - assert response.is_internal is True - - -def test_create_uptime_check_config_from_dict(): - test_create_uptime_check_config(request_type=dict) - - -def test_create_uptime_check_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_uptime_check_config), - '__call__') as call: - client.create_uptime_check_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.CreateUptimeCheckConfigRequest() - - -@pytest.mark.asyncio -async def test_create_uptime_check_config_async(transport: str = 'grpc_asyncio', request_type=uptime_service.CreateUptimeCheckConfigRequest): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig( - name='name_value', - display_name='display_name_value', - selected_regions=[uptime.UptimeCheckRegion.USA], - is_internal=True, - )) - response = await client.create_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.CreateUptimeCheckConfigRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, uptime.UptimeCheckConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.selected_regions == [uptime.UptimeCheckRegion.USA] - assert response.is_internal is True - - -@pytest.mark.asyncio -async def test_create_uptime_check_config_async_from_dict(): - await test_create_uptime_check_config_async(request_type=dict) - - -def test_create_uptime_check_config_field_headers(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.CreateUptimeCheckConfigRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_uptime_check_config), - '__call__') as call: - call.return_value = uptime.UptimeCheckConfig() - client.create_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_uptime_check_config_field_headers_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.CreateUptimeCheckConfigRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_uptime_check_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) - await client.create_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_uptime_check_config_flattened(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime.UptimeCheckConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_uptime_check_config( - parent='parent_value', - uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].uptime_check_config == uptime.UptimeCheckConfig(name='name_value') - - -def test_create_uptime_check_config_flattened_error(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_uptime_check_config( - uptime_service.CreateUptimeCheckConfigRequest(), - parent='parent_value', - uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_uptime_check_config_flattened_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime.UptimeCheckConfig() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_uptime_check_config( - parent='parent_value', - uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].uptime_check_config == uptime.UptimeCheckConfig(name='name_value') - - -@pytest.mark.asyncio -async def test_create_uptime_check_config_flattened_error_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_uptime_check_config( - uptime_service.CreateUptimeCheckConfigRequest(), - parent='parent_value', - uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), - ) - - -def test_update_uptime_check_config(transport: str = 'grpc', request_type=uptime_service.UpdateUptimeCheckConfigRequest): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime.UptimeCheckConfig( - name='name_value', - display_name='display_name_value', - selected_regions=[uptime.UptimeCheckRegion.USA], - is_internal=True, - monitored_resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), - http_check=uptime.UptimeCheckConfig.HttpCheck(request_method=uptime.UptimeCheckConfig.HttpCheck.RequestMethod.GET), - ) - response = client.update_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.UpdateUptimeCheckConfigRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, uptime.UptimeCheckConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.selected_regions == [uptime.UptimeCheckRegion.USA] - assert response.is_internal is True - - -def test_update_uptime_check_config_from_dict(): - test_update_uptime_check_config(request_type=dict) - - -def test_update_uptime_check_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_uptime_check_config), - '__call__') as call: - client.update_uptime_check_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.UpdateUptimeCheckConfigRequest() - - -@pytest.mark.asyncio -async def test_update_uptime_check_config_async(transport: str = 'grpc_asyncio', request_type=uptime_service.UpdateUptimeCheckConfigRequest): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig( - name='name_value', - display_name='display_name_value', - selected_regions=[uptime.UptimeCheckRegion.USA], - is_internal=True, - )) - response = await client.update_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.UpdateUptimeCheckConfigRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, uptime.UptimeCheckConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.selected_regions == [uptime.UptimeCheckRegion.USA] - assert response.is_internal is True - - -@pytest.mark.asyncio -async def test_update_uptime_check_config_async_from_dict(): - await test_update_uptime_check_config_async(request_type=dict) - - -def test_update_uptime_check_config_field_headers(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.UpdateUptimeCheckConfigRequest() - - request.uptime_check_config.name = 'uptime_check_config.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_uptime_check_config), - '__call__') as call: - call.return_value = uptime.UptimeCheckConfig() - client.update_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'uptime_check_config.name=uptime_check_config.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_uptime_check_config_field_headers_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.UpdateUptimeCheckConfigRequest() - - request.uptime_check_config.name = 'uptime_check_config.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_uptime_check_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) - await client.update_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'uptime_check_config.name=uptime_check_config.name/value', - ) in kw['metadata'] - - -def test_update_uptime_check_config_flattened(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime.UptimeCheckConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_uptime_check_config( - uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].uptime_check_config == uptime.UptimeCheckConfig(name='name_value') - - -def test_update_uptime_check_config_flattened_error(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_uptime_check_config( - uptime_service.UpdateUptimeCheckConfigRequest(), - uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_update_uptime_check_config_flattened_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = uptime.UptimeCheckConfig() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(uptime.UptimeCheckConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_uptime_check_config( - uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].uptime_check_config == uptime.UptimeCheckConfig(name='name_value') - - -@pytest.mark.asyncio -async def test_update_uptime_check_config_flattened_error_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_uptime_check_config( - uptime_service.UpdateUptimeCheckConfigRequest(), - uptime_check_config=uptime.UptimeCheckConfig(name='name_value'), - ) - - -def test_delete_uptime_check_config(transport: str = 'grpc', request_type=uptime_service.DeleteUptimeCheckConfigRequest): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.DeleteUptimeCheckConfigRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_uptime_check_config_from_dict(): - test_delete_uptime_check_config(request_type=dict) - - -def test_delete_uptime_check_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_uptime_check_config), - '__call__') as call: - client.delete_uptime_check_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.DeleteUptimeCheckConfigRequest() - - -@pytest.mark.asyncio -async def test_delete_uptime_check_config_async(transport: str = 'grpc_asyncio', request_type=uptime_service.DeleteUptimeCheckConfigRequest): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.DeleteUptimeCheckConfigRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_uptime_check_config_async_from_dict(): - await test_delete_uptime_check_config_async(request_type=dict) - - -def test_delete_uptime_check_config_field_headers(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.DeleteUptimeCheckConfigRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_uptime_check_config), - '__call__') as call: - call.return_value = None - client.delete_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_uptime_check_config_field_headers_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = uptime_service.DeleteUptimeCheckConfigRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_uptime_check_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_uptime_check_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_uptime_check_config_flattened(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_uptime_check_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_uptime_check_config_flattened_error(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_uptime_check_config( - uptime_service.DeleteUptimeCheckConfigRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_uptime_check_config_flattened_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_uptime_check_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_uptime_check_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_uptime_check_config_flattened_error_async(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_uptime_check_config( - uptime_service.DeleteUptimeCheckConfigRequest(), - name='name_value', - ) - - -def test_list_uptime_check_ips(transport: str = 'grpc', request_type=uptime_service.ListUptimeCheckIpsRequest): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_ips), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = uptime_service.ListUptimeCheckIpsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_uptime_check_ips(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.ListUptimeCheckIpsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListUptimeCheckIpsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_uptime_check_ips_from_dict(): - test_list_uptime_check_ips(request_type=dict) - - -def test_list_uptime_check_ips_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_uptime_check_ips), - '__call__') as call: - client.list_uptime_check_ips() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.ListUptimeCheckIpsRequest() - - -@pytest.mark.asyncio -async def test_list_uptime_check_ips_async(transport: str = 'grpc_asyncio', request_type=uptime_service.ListUptimeCheckIpsRequest): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_ips), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(uptime_service.ListUptimeCheckIpsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_uptime_check_ips(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == uptime_service.ListUptimeCheckIpsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListUptimeCheckIpsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_uptime_check_ips_async_from_dict(): - await test_list_uptime_check_ips_async(request_type=dict) - - -def test_list_uptime_check_ips_pager(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_ips), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - ], - next_page_token='abc', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[], - next_page_token='def', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - ], - next_page_token='ghi', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.list_uptime_check_ips(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, uptime.UptimeCheckIp) - for i in results) - -def test_list_uptime_check_ips_pages(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_ips), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - ], - next_page_token='abc', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[], - next_page_token='def', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - ], - next_page_token='ghi', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - ], - ), - RuntimeError, - ) - pages = list(client.list_uptime_check_ips(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_uptime_check_ips_async_pager(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_ips), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - ], - next_page_token='abc', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[], - next_page_token='def', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - ], - next_page_token='ghi', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_uptime_check_ips(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, uptime.UptimeCheckIp) - for i in responses) - -@pytest.mark.asyncio -async def test_list_uptime_check_ips_async_pages(): - client = UptimeCheckServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_uptime_check_ips), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - ], - next_page_token='abc', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[], - next_page_token='def', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - ], - next_page_token='ghi', - ), - uptime_service.ListUptimeCheckIpsResponse( - uptime_check_ips=[ - uptime.UptimeCheckIp(), - uptime.UptimeCheckIp(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_uptime_check_ips(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.UptimeCheckServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.UptimeCheckServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = UptimeCheckServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.UptimeCheckServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = UptimeCheckServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.UptimeCheckServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = UptimeCheckServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.UptimeCheckServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.UptimeCheckServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.UptimeCheckServiceGrpcTransport, - transports.UptimeCheckServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.UptimeCheckServiceGrpcTransport, - ) - -def test_uptime_check_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.UptimeCheckServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_uptime_check_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.UptimeCheckServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'list_uptime_check_configs', - 'get_uptime_check_config', - 'create_uptime_check_config', - 'update_uptime_check_config', - 'delete_uptime_check_config', - 'list_uptime_check_ips', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_uptime_check_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.UptimeCheckServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_uptime_check_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.UptimeCheckServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', - ), - quota_project_id="octopus", - ) - - -def test_uptime_check_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.monitoring_v3.services.uptime_check_service.transports.UptimeCheckServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.UptimeCheckServiceTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_uptime_check_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - UptimeCheckServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_uptime_check_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - UptimeCheckServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.UptimeCheckServiceGrpcTransport, - transports.UptimeCheckServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_uptime_check_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/monitoring', 'https://www.googleapis.com/auth/monitoring.read',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.UptimeCheckServiceGrpcTransport, - transports.UptimeCheckServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_uptime_check_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.UptimeCheckServiceGrpcTransport, grpc_helpers), - (transports.UptimeCheckServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_uptime_check_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "monitoring.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/monitoring', - 'https://www.googleapis.com/auth/monitoring.read', -), - scopes=["1", "2"], - default_host="monitoring.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.UptimeCheckServiceGrpcTransport, transports.UptimeCheckServiceGrpcAsyncIOTransport]) -def test_uptime_check_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_uptime_check_service_host_no_port(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com'), - ) - assert client.transport._host == 'monitoring.googleapis.com:443' - - -def test_uptime_check_service_host_with_port(): - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='monitoring.googleapis.com:8000'), - ) - assert client.transport._host == 'monitoring.googleapis.com:8000' - -def test_uptime_check_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.UptimeCheckServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_uptime_check_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.UptimeCheckServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.UptimeCheckServiceGrpcTransport, transports.UptimeCheckServiceGrpcAsyncIOTransport]) -def test_uptime_check_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.UptimeCheckServiceGrpcTransport, transports.UptimeCheckServiceGrpcAsyncIOTransport]) -def test_uptime_check_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_uptime_check_config_path(): - project = "squid" - uptime_check_config = "clam" - expected = "projects/{project}/uptimeCheckConfigs/{uptime_check_config}".format(project=project, uptime_check_config=uptime_check_config, ) - actual = UptimeCheckServiceClient.uptime_check_config_path(project, uptime_check_config) - assert expected == actual - - -def test_parse_uptime_check_config_path(): - expected = { - "project": "whelk", - "uptime_check_config": "octopus", - } - path = UptimeCheckServiceClient.uptime_check_config_path(**expected) - - # Check that the path construction is reversible. - actual = UptimeCheckServiceClient.parse_uptime_check_config_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = UptimeCheckServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = UptimeCheckServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = UptimeCheckServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = UptimeCheckServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = UptimeCheckServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = UptimeCheckServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = UptimeCheckServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = UptimeCheckServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = UptimeCheckServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = UptimeCheckServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = UptimeCheckServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = UptimeCheckServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = UptimeCheckServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = UptimeCheckServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = UptimeCheckServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.UptimeCheckServiceTransport, '_prep_wrapped_messages') as prep: - client = UptimeCheckServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.UptimeCheckServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = UptimeCheckServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py b/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py index 1228ffca..07ae251a 100644 --- a/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py +++ b/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py @@ -44,7 +44,6 @@ from google.cloud.monitoring_v3.services.notification_channel_service.transports.base import ( _GOOGLE_AUTH_VERSION, ) -from google.cloud.monitoring_v3.types import common from google.cloud.monitoring_v3.types import mutation_record from google.cloud.monitoring_v3.types import notification from google.cloud.monitoring_v3.types import notification_service @@ -987,7 +986,6 @@ def test_get_notification_channel_descriptor( type_="type__value", display_name="display_name_value", description="description_value", - supported_tiers=[common.ServiceTier.SERVICE_TIER_BASIC], ) response = client.get_notification_channel_descriptor(request) @@ -1002,7 +1000,6 @@ def test_get_notification_channel_descriptor( assert response.type_ == "type__value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.supported_tiers == [common.ServiceTier.SERVICE_TIER_BASIC] def test_get_notification_channel_descriptor_from_dict(): @@ -1050,7 +1047,6 @@ async def test_get_notification_channel_descriptor_async( type_="type__value", display_name="display_name_value", description="description_value", - supported_tiers=[common.ServiceTier.SERVICE_TIER_BASIC], ) ) response = await client.get_notification_channel_descriptor(request) @@ 
-1066,7 +1062,6 @@ async def test_get_notification_channel_descriptor_async(
     assert response.type_ == "type__value"
     assert response.display_name == "display_name_value"
     assert response.description == "description_value"
-    assert response.supported_tiers == [common.ServiceTier.SERVICE_TIER_BASIC]
 
 
 @pytest.mark.asyncio

From c63b9680464d564a155c2971479a81ebd83eda10 Mon Sep 17 00:00:00 2001
From: Anthonios Partheniou
Date: Fri, 27 Aug 2021 16:40:13 +0000
Subject: [PATCH 3/3] work around gapic generator bug

---
 google/cloud/monitoring_v3/types/service.py | 1 +
 owlbot.py                                   | 8 ++++++++
 2 files changed, 9 insertions(+)

diff --git a/google/cloud/monitoring_v3/types/service.py b/google/cloud/monitoring_v3/types/service.py
index 13112cd7..1cf9bccf 100644
--- a/google/cloud/monitoring_v3/types/service.py
+++ b/google/cloud/monitoring_v3/types/service.py
@@ -409,6 +409,7 @@ class LatencyCriteria(proto.Message):
 
 class Range(proto.Message):
     r"""Range of numerical values within ``min`` and ``max``.
+
     Attributes:
         min_ (float):
             Range minimum.
diff --git a/owlbot.py b/owlbot.py
index bb935a64..a75121f5 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -71,6 +71,14 @@
         re.MULTILINE| re.DOTALL
     )
 
+    # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902
+    s.replace(library / f"google/cloud/monitoring_{library.name}/types/service.py",
+        r""".
+    Attributes:""",
+        r""".\n
+    Attributes:""",
+    )
+
     # don't copy nox.py, setup.py, README.rst, docs/index.rst
     excludes = ["nox.py", "setup.py", "README.rst", "docs/index.rst"]
     s.move(library, excludes=excludes)
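
For readers following the owlbot.py change above: the generator currently emits some proto-plus docstrings whose summary line runs directly into the "Attributes:" header (see the Range docstring in the service.py hunk), and the added s.replace() rule inserts a blank line between the two until https://github.com/googleapis/gapic-generator-python/issues/902 is fixed upstream. Below is a minimal standalone sketch of the same text transformation; it uses plain re.sub on a hypothetical local file named service.py rather than synthtool's s.replace over the staging directory, so treat the path and helper choice as illustrative assumptions, not part of this PR.

import re
from pathlib import Path

# Hypothetical local copy of the generated module; the real rule targets
# google/cloud/monitoring_{version}/types/service.py in the staging tree.
path = Path("service.py")
text = path.read_text()

# Insert one blank line between a docstring summary line that ends with "."
# and a directly following 4-space-indented "Attributes:" header, mirroring
# the pattern/replacement pair passed to s.replace() in owlbot.py above.
fixed = re.sub(
    r"\.\n(    Attributes:)",  # "." at end of summary, then the header
    ".\n\n\\1",                # same text with a blank line in between
    text,
)

path.write_text(fixed)

Applied to the Range docstring, this produces exactly the single added blank line recorded in the service.py hunk of this patch.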